branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>from distutils.core import setup
from setuptools import find_packages
# Package metadata for the py-auth0-jwt-rest distribution.
setup(
    name='py-auth0-jwt-rest',
    version='0.1.7',
    url='https://github.com/hms-dbmi/py-auth0-jwt-rest',
    author='<NAME>',
    author_email='<EMAIL>',
    packages=['pyauth0jwtrest',],
    license='Creative Commons Attribution-Noncommercial-Share Alike license',
    # Runtime dependencies: Django + DRF + DRF-JWT for the auth backend,
    # cryptography for RS256 public-key handling, requests to fetch the JWKS.
    install_requires=[
        'django>=1.10.0',
        'djangorestframework>=1.9.0',
        'djangorestframework-jwt>=1.7.2',
        'cryptography',
        'requests']
)
<file_sep>from django.apps import AppConfig
class Auth0JWTRestConfig(AppConfig):
    """Django AppConfig registering the pyauth0jwtrest application."""
    # Dotted Python path / default app label.
    name = 'pyauth0jwtrest'
    verbose_name = 'Auth0 JWT Authentication for Django Rest Framework'
<file_sep>import base64
from django.contrib.auth.backends import RemoteUserBackend, get_user_model
from django.utils.translation import ugettext as _
from rest_framework import exceptions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
import jwt
from pyauth0jwtrest.settings import jwt_api_settings, auth0_api_settings
from pyauth0jwtrest.utils import get_auth0_public_key, get_jwt_value
jwt_decode_handler = jwt_api_settings.JWT_DECODE_HANDLER
jwt_get_username_from_payload = jwt_api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER
import logging
logger = logging.getLogger(__name__)
class Auth0JSONWebTokenAuthentication(JSONWebTokenAuthentication, RemoteUserBackend):
    """
    Clients should authenticate by passing the token key in the "Authorization"
    HTTP header, prepended with the string specified in the setting
    `JWT_AUTH_HEADER_PREFIX`. For example:
    Authorization: JWT <KEY>
    """
    def authenticate(self, request):
        """
        You should pass a header of your request: clientcode: web
        This function initialize the settings of JWT with the specific client's informations.
        """
        # Determine which Auth0 Client ID (aud) the JWT pertains to.
        try:
            jwt_string = get_jwt_value(request)
            # Decode WITHOUT signature verification, only to read the 'aud'
            # claim; the real verification happens in the parent class below.
            auth0_client_id = str(jwt.decode(jwt_string, verify=False)['aud'])
        except Exception as e:
            msg = _('Failed to get the aud from jwt payload')
            raise exceptions.AuthenticationFailed(msg)
        # Check that the Client ID is in the allowed list of Auth0 Client IDs for this application
        allowed_auth0_client_id_list = auth0_api_settings.CLIENT_ID_LIST
        if auth0_client_id not in allowed_auth0_client_id_list:
            msg = _('Auth0 Client ID not allowed')
            raise exceptions.AuthenticationFailed(msg)
        # Set the JWT_AUDIENCE for this request to the accepted Auth0 Client ID
        # NOTE(review): these assignments mutate module-global DRF-JWT settings,
        # which is not safe under concurrent requests with different clients —
        # confirm deployment is single-client or single-threaded per request.
        jwt_api_settings.JWT_AUDIENCE = auth0_client_id
        jwt_api_settings.JWT_ALGORITHM = auth0_api_settings.ALGORITHM
        jwt_api_settings.JWT_AUTH_HEADER_PREFIX = auth0_api_settings.JWT_AUTH_HEADER_PREFIX
        # RS256 Related configurations
        if auth0_api_settings.ALGORITHM.upper() == "HS256":
            if auth0_api_settings.CLIENT_SECRET_BASE64_ENCODED:
                # Auth0 secrets use the URL-safe base64 alphabet; translate to
                # the standard alphabet before decoding.
                jwt_api_settings.JWT_SECRET_KEY = base64.b64decode(
                    auth0_api_settings.CLIENT_SECRET.replace("_", "/").replace("-", "+")
                )
            else:
                jwt_api_settings.JWT_SECRET_KEY = auth0_api_settings.CLIENT_SECRET
        # If RS256, call the utility method to load the public cert from Auth0
        elif auth0_api_settings.ALGORITHM.upper() == "RS256":
            jwt_api_settings.JWT_PUBLIC_KEY = get_auth0_public_key(auth0_api_settings.DOMAIN)
        # Delegate actual token verification / user resolution to DRF-JWT.
        return super(Auth0JSONWebTokenAuthentication, self).authenticate(request)

    def authenticate_credentials(self, payload):
        """
        Once Django Rest Framework calls this method, it can be assumed that the JWT has been
        verified. If the application requires users to exist, create a user if one is not found.
        Returns a Django user or username.
        """
        UserModel = get_user_model()
        username = jwt_get_username_from_payload(payload)
        if not username:
            msg = _('Invalid payload.')
            raise exceptions.AuthenticationFailed(msg)
        user = None
        if auth0_api_settings.REQUIRE_USERS:
            # Check for email property, and if so assign it to both username and email
            if auth0_api_settings.USERNAME_FIELD == 'email':
                user, created = UserModel.objects.get_or_create(username=username, email=username)
            else:
                user, created = UserModel._default_manager.get_or_create(**{
                    UserModel.USERNAME_FIELD: username
                })
            # user_can_authenticate() is inherited from RemoteUserBackend
            # (e.g. it rejects inactive users).
            return user if self.user_can_authenticate(user) else None
        else:
            return username
<file_sep>from django.conf import settings
from rest_framework.settings import APISettings
from rest_framework_jwt.settings import api_settings as jwt_api_settings
# User-provided overrides: the AUTH0 dict from the Django settings module.
USER_SETTINGS = getattr(settings, 'AUTH0', None)

# Defaults applied when a key is missing from AUTH0; several fall back to
# legacy flat settings (AUTH0_CLIENT_ID, AUTH0_DOMAIN, ...).
DEFAULTS = {
    'ALGORITHM': 'RS256',
    'CLIENT_ID': getattr(settings, 'AUTH0_CLIENT_ID', None),
    'CLIENT_ID_LIST': getattr(settings, 'AUTH0_CLIENT_ID_LIST', None),
    'DOMAIN': getattr(settings, 'AUTH0_DOMAIN', None),
    'JWT_AUTH_HEADER_PREFIX': 'JWT',
    'USERNAME_FIELD': 'email',
    'JWT_PAYLOAD_GET_USERNAME_HANDLER': 'pyauth0jwtrest.utils.auth0_get_username_from_payload_handler',
    'REQUIRE_USERS': True,
    'CLIENT_SECRET_BASE64_ENCODED': True,
    'CLIENT_SECRET': getattr(settings, 'AUTH0_CLIENT_SECRET', None),
}
# List of settings that may be in string import notation.
IMPORT_STRINGS = (
    'JWT_PAYLOAD_GET_USERNAME_HANDLER',
)
auth0_api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
# Wire our username handler into DRF-JWT's own settings object.
jwt_api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER = auth0_api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER
# Sometimes the iat timestamp from Auth0 is ahead of our localhost or our AWS servers.
# Use the leeway setting to prevent "iat > now + leeway" errors.
jwt_api_settings.JWT_LEEWAY = 60
<file_sep># py-auth0-jwt-rest
<file_sep>import requests
import jwt
from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.backends import default_backend
from django.utils.encoding import smart_text
from django.utils.translation import ugettext as _
from rest_framework import exceptions
from rest_framework.authentication import get_authorization_header
from pyauth0jwtrest.settings import auth0_api_settings
# Handlers --------------------------------------------------------------------
def auth0_get_username_from_payload_handler(payload):
    """Pull the username out of a decoded JWT payload.

    The claim used is whichever field USERNAME_FIELD names (default 'email');
    returns None when the claim is absent.
    """
    return payload.get(auth0_api_settings.USERNAME_FIELD)
# Authorization Utils ---------------------------------------------------------
def get_jwt_value(request):
    """Extract the raw JWT from the request's Authorization header.

    Returns None when the header is missing or carries a different prefix;
    raises AuthenticationFailed when the header is present but malformed.
    """
    parts = get_authorization_header(request).split()
    expected_prefix = auth0_api_settings.JWT_AUTH_HEADER_PREFIX.lower()
    # Wrong or missing prefix: not our header, let other authenticators run.
    if not parts or smart_text(parts[0].lower()) != expected_prefix:
        return None
    if len(parts) == 1:
        raise exceptions.AuthenticationFailed(
            _('Invalid Authorization header. No credentials provided.'))
    if len(parts) > 2:
        raise exceptions.AuthenticationFailed(
            _('Invalid Authorization header. Credentials string '
              'should not contain spaces.'))
    return parts[1]
def get_email_from_request(request):
    """Read the 'email' claim from the request's JWT without verifying it."""
    token = get_jwt_value(request)
    return str(jwt.decode(token, verify=False)['email'])
def get_auth0_public_key(auth0_domain):
    """Fetch the tenant's signing certificate from the Auth0 JWKS endpoint
    and return its public key, for RS256 signature verification."""
    # Get the pub key from the endpoint
    jwk_json = requests.get("https://" + auth0_domain + "/.well-known/jwks.json").json()
    # Build it from the JSON
    # NOTE(review): only the first key's first x5c entry is used — this assumes
    # the tenant publishes a single signing key; confirm before key rotation.
    cert = '-----BEGIN CERTIFICATE-----\n' + jwk_json['keys'][0]['x5c'][0] + '\n-----END CERTIFICATE-----'
    certificate = load_pem_x509_certificate(str.encode(cert), default_backend())
    return certificate.public_key()
# Auth0 Metadata --------------------------------------------------------------
def get_app_metadata_from_payload(payload):
    """Return the Auth0 'app_metadata' claim from a decoded JWT payload,
    or None when it is absent."""
    return payload.get('app_metadata')
def get_user_metadata_from_payload(payload):
    """Return the Auth0 'user_metadata' claim from a decoded JWT payload,
    or None when it is absent."""
    return payload.get('user_metadata')
| c6eab2a1395fabfe40520ca8149cb52e89af2150 | [
"Markdown",
"Python"
] | 6 | Python | hms-dbmi/py-auth0-jwt-rest | f50798da4a83fc926515291e9dc2f058cd66e355 | 526011fef26924c13ac06f26ad42d2859ce4ced3 |
refs/heads/main | <repo_name>eczeps/gwwc-data-table-react<file_sep>/src/component/App.js
import './App.css';
import DataTable from './DataTable'
import { useState, useEffect } from 'react';
import firebase from 'firebase/app';
import 'firebase/database';
// Initialize Firebase with the project's public web-app configuration.
// NOTE(review): these are client-side identifiers, not secrets, but consider
// moving them to environment variables for per-environment builds.
firebase.initializeApp({
  apiKey: "<KEY>",
  authDomain: "gwwc-data-table.firebaseapp.com",
  databaseURL: "https://gwwc-data-table.firebaseio.com",
  projectId: "gwwc-data-table",
  storageBucket: "gwwc-data-table.appspot.com",
  messagingSenderId: "924988001345",
  appId: "1:924988001345:web:655254cb48b7162b6f4ac0",
  measurementId: "G-TC4E5JES7F"
})
// Realtime Database handle shared by the App component below.
const database = firebase.database();
// Root component: loads all organisations from the Firebase Realtime
// Database once on mount and renders them in the data table.
function App() {
  const [organizations, setOrganizations] = useState([]);
  useEffect(() => {
    // One-shot read. '/organisations/' appears to hold a map keyed by
    // charity slug (inferred from DataTable's usage — confirm DB schema).
    database.ref('/organisations/').once('value').then((snapshot) => {
      setOrganizations(snapshot.val());
    });
  }, [])
  return (
    <div className="App">
      <DataTable organizations={organizations}></DataTable>
    </div>
  );
}
export default App;
<file_sep>/src/component/DataTable.js
import React, { useMemo } from 'react';
import { useTable } from 'react-table';
import MUITable from './MUITable';
// Column definitions for react-table. The two link columns share the
// 'slug' accessor but are disambiguated by their explicit 'id' values.
function getColumns() {
  return [
    {
      accessor: 'charity',
      Header: 'Charity',
    },
    {
      accessor: 'cause',
      Header: 'Cause Area',
    },
    {
      accessor: 'totalDonations',
      Header: 'Total Donations',
    },
    {
      accessor: 'gwwcDonations',
      Header: 'GWWC Donations',
    },
    {
      accessor: 'slug',
      id: 'donate',
      Header: 'Donate',
      // Render a donation link built from the organisation's slug.
      Cell: ({ value }) => {
        // TODO: update url
        const url = `https://app.effectivealtruism.org/donations/new/allocation?allocation[${value}]=100`;
        return <a href={url}>Donate</a>;
      },
      disableSortBy: true,
    },
    {
      accessor: 'slug',
      id: 'learnMore',
      Header: 'Learn More',
      Cell: ({ value }) => {
        // TODO: update url
        const url = `https://app.effectivealtruism.org/funds/${value}`;
        return <a href={url}>Learn More</a>;
      },
      disableSortBy: true,
    },
  ];
}
function filterEmpty(str) {
// TODO handle null value filtering upstream in excel sheet import to firebase
if (str == '&null') return '';
if (str == '#N/A') return '';
return str;
}
// Convert the { slug: record } map from Firebase into flat row objects
// for react-table, dropping rows that lack a charity name.
function createRows(organizations) {
    const rows = Object.entries(organizations).map(([slug, record]) => ({
        slug,
        charity: filterEmpty(record['Full Name']),
        cause: filterEmpty(record['Core Cause']),
        totalDonations: filterEmpty(record['Total Donations']),
        gwwcDonations: filterEmpty(record['GWWC &dollar donated']),
    }));
    return rows.filter(row => !!row['charity']);
}
// Table wrapper: memoizes row and column construction so react-table only
// recomputes when the organizations prop actually changes.
function DataTable(props) {
  const { organizations } = props;
  const data = useMemo(() => createRows(organizations), [organizations]);
  const columns = useMemo(getColumns, []);
  return (
    <MUITable data={data} columns={columns} />
  )
}
export default DataTable;
<file_sep>/src/component/Pagination.js
import React from 'react'
import FirstPageIcon from '@material-ui/icons/FirstPage'
import IconButton from '@material-ui/core/IconButton'
import KeyboardArrowLeft from '@material-ui/icons/KeyboardArrowLeft'
import KeyboardArrowRight from '@material-ui/icons/KeyboardArrowRight'
import LastPageIcon from '@material-ui/icons/LastPage'
import { makeStyles } from '@material-ui/core/styles'
import PropTypes from 'prop-types'
// Spacing for the pagination control cluster.
const useStyles = makeStyles(theme => ({
  root: {
    flexShrink: 0,
    marginLeft: theme.spacing(2.5),
  },
}))
// Custom table-pagination actions: first / previous / next / last buttons.
const Pagination = props => {
  const classes = useStyles();
  const { count, page, rowsPerPage, onChangePage } = props;
  // Zero-based index of the last page.
  const lastPage = Math.ceil(count / rowsPerPage) - 1;
  // Curried click handler delegating to the parent's onChangePage callback.
  const goToPage = page => event => {
    onChangePage(event, page);
  }
  return (
    <div className={classes.root}>
      <IconButton onClick={goToPage(0)} disabled={page === 0}>
        <FirstPageIcon />
      </IconButton>
      <IconButton onClick={goToPage(page - 1)} disabled={page === 0}>
        <KeyboardArrowLeft />
      </IconButton>
      <IconButton onClick={goToPage(page + 1)} disabled={page >= lastPage}>
        <KeyboardArrowRight />
      </IconButton>
      <IconButton onClick={goToPage(Math.max(0, lastPage))} disabled={page >= lastPage}>
        <LastPageIcon />
      </IconButton>
    </div>
  )
}

Pagination.propTypes = {
  count: PropTypes.number.isRequired,
  onChangePage: PropTypes.func.isRequired,
  page: PropTypes.number.isRequired,
  rowsPerPage: PropTypes.number.isRequired,
};
export default Pagination;
| 5f904d6b69b0c0be772f5fd1111e5a6daeeca519 | [
"JavaScript"
] | 3 | JavaScript | eczeps/gwwc-data-table-react | 8f79caf2dc4e66433fce8ffd26a2a8138eaf4c1c | 36e90ff9486173573022f9131a214656ebe57edd |
refs/heads/master | <file_sep>const R = require('ramda')
const log = require('debug')('q')
const isVar = x => {
return String(x).startsWith('?')
}
/**
 * Evaluate a DatalogQuery against an in-memory collection of datoms.
 *
 * Each where-clause becomes a "relation" (its logic variables, a map of
 * variable -> tuple position, and the datoms matching its constants).
 * Relations are folded together with a hash join, and the find spec is
 * projected from the final joined relation.
 *
 * Fix vs. original: all loop variables are now declared with const — the
 * original's bare `for (tuple of ...)` / `for (bindingVar in ...)` created
 * implicit globals (and throws ReferenceError in strict/module code).
 *
 * @param {DatalogQuery} datalogQuery query with findVars and clauses set
 * @param {Array} inputs datoms, each a tuple like [entity, attribute, value]
 * @returns {Set<Set>} one inner Set per result row, holding the bound values
 */
function q(datalogQuery, inputs) {
  const clauses = datalogQuery.clauses
  const context = {
    rels: [],
    inputs
  }

  // given an index mapping of matching positions
  // filter the input to only datoms that match those
  // this would incur a db search if supported
  // (otherwise full scan on inputs occurs)
  const filterInput = (index, datoms) => {
    return datoms.filter(datom => {
      const tests = R.keys(index).map(iStr => {
        const i = Number(iStr)
        return R.equals(datom[i], index[i])
      })
      return R.all(x => !!x, tests)
    })
  }

  // Build the relation for one clause.
  const makeRel = (ctx, clause) => {
    const symbols = clause.filter(isVar)
    // mapping of tuple index -> constant, used to filter the input source
    const index = R.range(0, 4).reduce((acc, i) => {
      const elem = clause[i]
      if (!isVar(elem) && !!elem) {
        acc[i] = elem
      }
      return acc
    }, {})
    // mapping of logic var -> position in the clause, used during hashjoin
    const offsetMap = R.pipe(
      cs =>
        cs.map((elem, i) => {
          if (isVar(elem)) {
            return [elem, i]
          }
        }),
      R.filter(x => !!x),
      R.fromPairs
    )(clause)
    const coll = filterInput(index, ctx.inputs)
    return {
      symbols,
      offsetMap,
      coll
    }
  }

  // create a new relation by comparing the coll values in rel2
  // with those in rel1, keeping only the values that match for each
  // logic var
  // TODO hash against the smaller collection
  const hashJoin = (rel1, rel2) => {
    const combinedSyms = R.uniq(R.concat(rel1.symbols, rel2.symbols))
    const sharedSyms = R.intersection(rel1.symbols, rel2.symbols)
    const hashTable = new Map()
    for (const bindingVar in rel1.offsetMap) {
      hashTable.set(bindingVar, new Set())
    }
    // collect every value each variable takes in rel1
    for (const tuple of rel1.coll) {
      for (const bindingVar in rel1.offsetMap) {
        const idx = rel1.offsetMap[bindingVar]
        hashTable.get(bindingVar).add(tuple[idx])
      }
    }
    // keep only rel2 tuples whose shared-variable values also occur in rel1
    const intersectionColl = rel2.coll.filter(tuple => {
      const tests = []
      for (const bindingVar of sharedSyms) {
        const idx = rel2.offsetMap[bindingVar]
        tests.push(hashTable.get(bindingVar).has(tuple[idx]))
      }
      return tests.every(x => !!x)
    })
    return {
      symbols: combinedSyms,
      // what should offset map be in the case of "ref" types
      // where the same binding var is used in both positions?
      offsetMap: R.merge(rel1.offsetMap, rel2.offsetMap),
      // unclear on whether the combined relation should have both
      // relation's collections.
      coll: intersectionColl,
      joined: true
    }
  }

  const hashJoinRel = (ctx, rel2) => hashJoin(ctx.rels, rel2)

  // Project the find variables out of the single joined relation.
  const applyFind = (query, ctx) => {
    const findVars = query.findVars
    const returnSet = new Set()
    log(`applying find vars ${findVars}`)
    const joinedRel = ctx.rels
    for (const tuple of joinedRel.coll) {
      const row = new Set()
      for (const bindingVar of findVars) {
        const idx = joinedRel.offsetMap[bindingVar]
        row.add(tuple[idx])
      }
      returnSet.add(row)
    }
    return returnSet
  }

  // Fold every clause into the context, joining as we go.
  const reducedCtx = clauses.reduce((ctx, clause) => {
    log('creating relation for clause', clause)
    const rel = makeRel(ctx, clause)
    log('created relation of length', rel.coll.length)
    if (R.isEmpty(ctx.rels)) {
      // first clause: nothing to join against yet
      ctx.rels = rel
      return ctx
    }
    log('running hash join')
    const joinedRel = hashJoinRel(ctx, rel)
    log('completed hash join')
    ctx.rels = joinedRel
    return ctx
  }, context)

  const final = applyFind(datalogQuery, reducedCtx)
  log('done')
  return final
}
module.exports = q
<file_sep>class DatalogQuery {
constructor(props) {
  // findVars: variables to project; clauses: accumulated where-patterns.
  this.findVars = null
  this.clauses = []
}
// Set the projection variables. Chainable.
find(...findVars) {
  this.findVars = findVars
  return this
}
// Placeholder for Datalog's :in bindings; currently a no-op. Chainable.
in() {
  return this
}
// Append one [e a v] pattern clause. Chainable.
where(...bindingVars) {
  this.clauses.push(bindingVars)
  return this
}
}
module.exports = {
DatalogQuery
}
<file_sep>
### DataScript notes
Querying is achieved by:
- reduces over each clause, modifying a context variable
- for each clause it creates a "relation" which is a tuple of the
parts of the query and the (filtered | searched) datoms that match
the pattern
- filtering of the coll is accomplished by
- mapping the array index to the value being tested
- filtering coll if a[i] = v
- each relation is hash-joined into the context
building up a single relation which maps across all logic vars
- the find specification is then evaluated on the summed relation
<file_sep>const R = require('ramda')
const { DatalogQuery } = require('../src/query')
const q = require('../src/q')
// Benchmark script: build n random datoms plus four known rows, then time
// a two-clause join query over them.
const attrs = [':user/age', ':user/size', ':user/speed']
const values = [25, 50, 150]
// Pick a uniformly random element of seq.
const randNth = seq => {
  let max = seq.length
  let r = Math.floor(Math.random() * max)
  return seq[r]
}
let rnd = randNth(attrs)
const n = 1000000
const data = R.range(0, n).map(i => {
  return [i, randNth(attrs), randNth(values)]
})
// Seed rows guaranteed to satisfy the query below.
data.push([6000, ':user/age', 50])
data.push([6000, ':user/size', 90])
data.push([6001, ':user/age', 50])
data.push([6001, ':user/size', 91])
let myQ = new DatalogQuery({})
let query = myQ
  .find('?e', '?s')
  .where('?e', ':user/age', 50)
  .where('?e', ':user/size', '?s')
let start = Date.now()
let res = q(query, data)
let end = Date.now()
let elapsed = end - start
console.log(res)
console.log(`${data.length} rows, ${elapsed}ms`)
"JavaScript",
"Text"
] | 4 | JavaScript | bhurlow/statalog | 4b9cc2c824aed3f5b566fbc977fc014726411f43 | 3ed0eac55e0112d031a43cec6abdfa8642d20b4a |
refs/heads/master | <file_sep>require 'capybara/dsl'
require_relative '../lib/bbc_site'
# include the bbc site
# Use verbose, documentation-style spec output.
RSpec.configure do |config|
  config.formatter = :documentation
end
# Register a Selenium driver that runs a real Chrome browser.
Capybara.register_driver(:chrome) do |app|
  Capybara::Selenium::Driver.new(app, :browser => :chrome)
end
Capybara.configure do |config|
config.ignore_hidden_elements = false
config.default_max_wait_time = 10
config.default_driver = :chrome
end<file_sep>require 'spec_helper'
describe 'Incorrect user details produces valid error' do
context 'it should respond with the correct error when incorrect details are input' do
it 'should produce an error clicking sign-in with blank fields' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  # Submit the form without filling in any field.
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_general_error_message).to eq "Sorry, those details don't match. Check you've typed them correctly."
  # Both the username and the password field should flag the missing input.
  # (The original asserted the password message twice; the first of the pair
  # was presumably meant to target the username field.)
  expect(@bbc_site.bbc_sign_in_page.get_username_error_message).to eq "Something's missing. Please check and try again."
  expect(@bbc_site.bbc_sign_in_page.get_password_error_message).to eq "Something's missing. Please check and try again."
end
# Wrong username, blank password: generic mismatch + missing-password errors.
it 'should produce a general error message and password when inputting an incorrect username' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  @bbc_site.bbc_sign_in_page.input_invalid_username
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_general_error_message).to eq "Sorry, those details don't match. Check you've typed them correctly."
  expect(@bbc_site.bbc_sign_in_page.get_password_error_message).to eq "Something's missing. Please check and try again."
end
# Unknown username with a valid password: account-not-found message.
it 'should produce an invalid username message when the username does not exist but the password is valid' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  @bbc_site.bbc_sign_in_page.input_invalid_username
  @bbc_site.bbc_sign_in_page.input_acceptable_password
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_username_error_message).to eq "Sorry, we can’t find an account with that username. If you're over 13, try your email address instead or get help here."
end
# Password shorter than the eight-character minimum.
it 'should produce error message for when the password is too short' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  @bbc_site.bbc_sign_in_page.input_short_password
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_password_error_message).to eq "Sorry, that password is too short. It needs to be eight characters or more."
end
# Password made up of letters only.
it 'should produce an error message when password only contain letters' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  @bbc_site.bbc_sign_in_page.input_character_only_password
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_password_error_message).to eq "Sorry, that password isn't valid. Please include something that isn't a letter."
end
# Existing account with the wrong password.
it 'should produce an error message when the wrong password is used to login an existing account' do
  @bbc_site = BbcSite.new
  @bbc_site.bbc_homepage.visit_homepage
  @bbc_site.bbc_homepage.click_sign_in_link
  @bbc_site.bbc_sign_in_page.input_valid_username
  @bbc_site.bbc_sign_in_page.input_acceptable_password
  @bbc_site.bbc_sign_in_page.click_sign_in_button
  expect(@bbc_site.bbc_sign_in_page.get_password_error_message).to eq "Uh oh, that password doesn’t match that account. Please try again."
end
end
end<file_sep>#superclass - include all pages in here
require_relative 'pages/bbc_homepage'
require_relative 'pages/bbc_sign_in_page'
class BbcSite
# Factory for the BBC homepage page object.
def bbc_homepage
  #instantiate the class bbc_homepage
  BbcHomepage.new
end
# Factory for the BBC sign-in page object.
def bbc_sign_in_page
  #instantiate the class bbc_sign_in_page
  BbcSignInPage.new
end
end<file_sep># Capybara POM
## Introduction to Page Object Model and Capybara
### Aim
This project aims to test the sign-in form on the BBC website by simulating the possible error scenarios that may come up as a user tries to log in. It uses the Page Object Model, which separates the testing framework from the page methods and logic. This keeps the code clean, easy to understand, reusable and maintainable.
### Technologies used
The system testing was done using Capybara. Capybara is a web-based test-automation tool that makes it possible to simulate scenarios for different user stories and to automate web applications.
| 7ced6f93b48af9a1049f5fec295cd2bdd15fe22d | [
"Markdown",
"Ruby"
] | 4 | Ruby | Rubenfppinto/Capybara_BBC_Testing_lab | 0105c6af0ac12a1ed248f7dcc9ad190afd186c71 | 73a31437a4b3056066934b91c606b9f2530f656f |
refs/heads/master | <repo_name>beeInteractive/Localization<file_sep>/helpers.php
<?php
if ( ! function_exists('localization')) {
    /**
     * Get the Localization instance.
     *
     * @return Arcanedev\Localization\Contracts\Localization
     */
    function localization()
    {
        // Resolve the singleton out of the Laravel service container.
        return app(Arcanedev\Localization\Contracts\Localization::class);
    }
}
if ( ! function_exists('localized_route')) {
    /**
     * Get a localized URL with a given trans route name.
     *
     * @param  string       $route       translated route name
     * @param  array        $parameters  route parameters
     * @param  string|null  $locale      locale key; defaults to the current locale
     *
     * @return string
     */
    function localized_route($route, $parameters = [], $locale = null)
    {
        // Fall back to the application's current locale when none is given.
        if (is_null($locale))
            $locale = localization()->getCurrentLocale();

        return localization()->getUrlFromRouteName($locale, $route, $parameters);
    }
}
| 2b6882f966c8880f095ea99422e835dd2145d293 | [
"PHP"
] | 1 | PHP | beeInteractive/Localization | 761bf985883f86b03107e27ecdfd6a26230c3162 | 738c5bc5bf865421214cdf1a421be762d23e0da6 |
refs/heads/master | <file_sep>package com.wt.heartrate;
import java.util.Random;
import android.app.Activity;
import android.content.Context;
import android.graphics.drawable.AnimationDrawable;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
/**
 * Heart-rate monitor screen: registers for TYPE_HEART_RATE sensor events,
 * animates a heart icon while detecting, and shows the first accepted
 * reading (or a timeout message) after a warm-up period.
 */
public class MainActivity extends Activity {
    // Heart-rate sensor plumbing.
    private SensorManager mSensorManager;
    private SensorEventListener mSensorEventListener;
    private Sensor mSensor;
    // Views: warning banner, animated heart icon, hint text, result display.
    private TextView mWarningTextView;
    private ImageView mStateIconView;
    private TextView mStateTextView;
    private TextView mResultTextView;
    private View mResultView;
    // Marshals UI transitions onto the main thread from sensor callbacks.
    private Handler mHandler = new Handler(){
        @Override
        public void handleMessage(Message message){
            switch(message.what){
                case MSG_HIDE_WARNING:
                    if(mWarningTextView != null){
                        mWarningTextView.setVisibility(View.GONE);
                    }
                    break;
                case MSG_SHOW_RESULT:
                    setState(STATE_RESULT);
                    stopDetecting();
                    break;
                case MSG_SHOW_TIMEOUT:
                    setState(STATE_TIMEOUT);
                    stopDetecting();
                    break;
                default:
                    break;
            }
        }
    };
    private static final int MSG_HIDE_WARNING = 1;
    private static final int MSG_SHOW_RESULT = 2;
    private static final int MSG_SHOW_TIMEOUT = 3;
    // Most recent accepted heart-rate reading (bpm).
    private int mCurrRate;
    // UI state machine: idle -> detecting -> result | timeout.
    private int mState = -1;
    private static final int STATE_IDLE = 0;
    private static final int STATE_DETECTING = 1;
    private static final int STATE_RESULT = 2;
    private static final int STATE_TIMEOUT = 3;
    // Two alternative layouts/animations; STYLE_2 is currently selected.
    private static final int STYLE_1 = 1;
    private static final int STYLE_2 = 2;
    private static final int mStyle = STYLE_2;
    // Timestamp when detection started; used for warm-up and timeout logic.
    private long mStartTime;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView((mStyle == STYLE_2) ? R.layout.heart_rate_monitor_main_2 : R.layout.heart_rate_monitor_main);
        mWarningTextView = (TextView)findViewById(R.id.heart_rate_warning);
        // Auto-hide the warning banner after two seconds.
        mHandler.sendEmptyMessageDelayed(MSG_HIDE_WARNING, 2000);
        mStateIconView = (ImageView)findViewById(R.id.image);
        mStateIconView.setOnClickListener(mOnClickListener);
        if(mStyle == STYLE_2){
            mStateIconView.setBackgroundResource(R.drawable.heartrate_checking_anim_2);
        }
        mStateTextView = (TextView)findViewById(R.id.heart_rate_state);
        if(mStateTextView != null){
            mStateTextView.setOnClickListener(mOnClickListener);
        }
        mResultView = findViewById(R.id.heart_rate_view);
        mResultTextView = (TextView)findViewById(R.id.heart_rate_number);
        setState(STATE_IDLE);
        mSensorManager = (SensorManager)this.getSystemService(Context.SENSOR_SERVICE);
        mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_HEART_RATE);
        Log.i("hcj", "mSensor="+mSensor);
        mSensorEventListener = new SensorEventListener(){
            @Override
            public void onAccuracyChanged(Sensor arg0, int arg1) {
                Log.i("hcj", "onAccuracyChanged");
            }
            @Override
            public void onSensorChanged(SensorEvent arg0) {
                int rate = (int)arg0.values[0];
                //int grade = (int)arg0.values[1];
                Log.i("hcj.heart", "onSensorChanged rate="+rate+",len="+arg0.values.length);
                //if(grade < 1f){
                //return;
                //}
                // NOTE(review): out-of-range readings are replaced with random
                // in-range values instead of being discarded — confirm intent.
                if(rate > 140f){
                    rate = (new Random().nextInt(20) + 120);
                }else if(rate < 50f){
                    return;
                }else if(rate > 0f && rate < 60f){
                    rate = (new Random().nextInt(20) + 50);
                }
                if(rate > 50f && rate < 140f){
                    long time = System.currentTimeMillis();
                    // Ignore readings during the first 8 s warm-up period.
                    if(time - mStartTime > 8000){
                        mCurrRate = rate;
                        mHandler.removeMessages(MSG_SHOW_TIMEOUT);
                        mHandler.sendEmptyMessage(MSG_SHOW_RESULT);
                    }
                }else{
                    //long time = System.currentTimeMillis();
                    //if(time - mStartTime > 25000){
                    //mHandler.sendEmptyMessage(MSG_SHOW_TIMEOUT);
                    //}
                }
                //mCurrRate
            }
        };
        mCurrRate = 0;
        startDetecting();
    }

    @Override
    public void onResume(){
        super.onResume();
        // Returning to the screen after a finished run resets it to idle.
        if(mState == STATE_RESULT || mState == STATE_TIMEOUT){
            mState = STATE_IDLE;
            updateStateView();
        }
    }

    @Override
    public void onPause(){
        super.onPause();
    }

    @Override
    public void onDestroy(){
        super.onDestroy();
        // Ensure the sensor listener is released.
        stopDetecting();
    }

    private void registerListener(){
        mSensorManager.registerListener(mSensorEventListener, mSensor, SensorManager.SENSOR_DELAY_FASTEST);
    }

    private void unregisterListener(){
        mSensorManager.unregisterListener(mSensorEventListener);
    }

    private boolean isDetecting(){
        return mState == STATE_DETECTING;
    }

    // Sync icon, hint text and result views with the current state.
    private void updateStateView(){
        if(mState == STATE_RESULT){
            mStateTextView.setVisibility(View.GONE);
            mResultView.setVisibility(View.VISIBLE);
            mResultTextView.setText(String.valueOf(mCurrRate));
        }else{
            mStateTextView.setVisibility(View.VISIBLE);
            mResultView.setVisibility(View.GONE);
            int hintTextId = R.string.heart_rate;
            if(isDetecting()){
                hintTextId = R.string.heart_rate_checking;
            }else if(mState == STATE_TIMEOUT){
                hintTextId = R.string.heart_rate_timeout;
            }
            mStateTextView.setText(hintTextId);
        }
        if(mStyle == STYLE_1){
            // STYLE_1 swaps the background drawable per state.
            mStateIconView.setBackgroundResource(isDetecting() ? R.drawable.heart_rate_heart_checking_anim : R.drawable.heart_rate_heart);
            if(isDetecting()){
                AnimationDrawable anim = (AnimationDrawable) mStateIconView.getBackground();
                anim.start();
            }
        }else{
            // STYLE_2 keeps one animation drawable and starts/stops it.
            AnimationDrawable anim = (AnimationDrawable) mStateIconView.getBackground();
            if(isDetecting()){
                anim.start();
            }else{
                anim.stop();
            }
        }
    }

    private void setState(int state){
        if(mState == state){
            return;
        }
        mState = state;
        updateStateView();
    }

    // Tapping the icon or hint restarts detection when not already running.
    private View.OnClickListener mOnClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View arg0) {
            if(isDetecting()){
                return;
            }
            startDetecting();
        }
    };

    private void startDetecting(){
        setState(STATE_DETECTING);
        //mHandler.sendEmptyMessageDelayed(MSG_SHOW_RESULT, 2*2000);
        registerListener();
        mStartTime = System.currentTimeMillis();
        // Give up after 25 s without an accepted reading.
        mHandler.sendEmptyMessageDelayed(MSG_SHOW_TIMEOUT, 25000);
    }

    private void stopDetecting(){
        unregisterListener();
    }
}
| 133de24dd771007fb26a5601011af3e3034b514c | [
"Java"
] | 1 | Java | walkerh86/WtHeartRate | c7cff51d1ecef95c093fd3f4660239627f9128d6 | f0971db3948f645d7c4b49152e5f72c609e96f55 |
refs/heads/master | <repo_name>crismachado/fusor<file_sep>/support/bin/do_start
#!/bin/bash
# Launch the data collector detached from the terminal, appending
# stdout and stderr to get_data.log.
./get_data </dev/null &>>get_data.log &
<file_sep>/support/bin/do_stop
#!/bin/bash
# Ask every running get_data process to shut down cleanly (SIGTERM).
killall -SIGTERM get_data
| ee7f85a041b65c3cd54774abb27c2f24a303e657 | [
"Shell"
] | 2 | Shell | crismachado/fusor | 162b85a2dd0307b6cb2a66626cbe7ac14114e943 | cca8e593f7de1ba145cb9218dc4b592a7771ac20 |
refs/heads/master | <repo_name>zeus911/zabbix_ssl_template<file_sep>/ssl_ports_lld.py
#!/usr/bin/python3
import socket
import sys
import json
import ssl
from multiprocessing.dummy import Pool
from itertools import repeat
ssl_ports = (443, 587, 636, 993, 995, 8888)
def ssl_ports_discovery(port: int, hostname: str):
    """Return *port* if it is open on *hostname* and speaks SSL/TLS.

    Returns None when the port is closed or does not complete a TLS
    handshake. Uses a 3-second connect timeout.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(3.0)
    try:
        # connect_ex returns 0 on success instead of raising.
        if sock.connect_ex((hostname, port)) != 0:
            return None
    finally:
        # Always release the probe socket (the original double-closed it on
        # the open-port path and leaked it on exceptions).
        sock.close()
    if check_ssl(hostname, port):
        return port
    return None
def check_ssl(host: str, port: int):
    """Return True when a TLS handshake can be completed with host:port.

    Certificate validity is deliberately NOT checked here — this only probes
    whether the service speaks TLS at all, so verification is disabled.
    Replaces the original's direct ssl.SSLSocket() construction (deprecated
    and removed in modern Python) and its bare ``except:`` clause.
    """
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    try:
        with socket.create_connection((host, port), timeout=3.0) as sock:
            with context.wrap_socket(sock, server_hostname=host) as ssl_sock:
                ssl_sock.getpeercert()
        return True
    except OSError:
        # Covers connection failures, timeouts and ssl.SSLError (an OSError
        # subclass) — handshake failed, so this is not a TLS port.
        return False
def make_lld_json(ports):
    """Serialize discovered ports into Zabbix low-level-discovery JSON.

    None entries (ports that failed discovery) are skipped.
    """
    entries = [{"{#SSLPORT}": str(p)} for p in ports if p is not None]
    return json.dumps({'data': entries})
if __name__ == '__main__':
    # Usage: ssl_ports_lld.py <hostname>
    if len(sys.argv) < 2:
        sys.exit(-1)
    host = sys.argv[1]
    if len(host) == 0:
        sys.exit(-1)
    # Probe all candidate ports concurrently, one thread-pool worker per port.
    pool = Pool(len(ssl_ports))
    results = pool.starmap(ssl_ports_discovery, zip(ssl_ports, repeat(host)))
    print(make_lld_json(results))
<file_sep>/ssl_expiration_check.py
#!/usr/bin/python3
import ssl
import OpenSSL
import datetime
import sys
if __name__ == '__main__':
    # Usage: ssl_expiration_check.py <host> <port>
    if len(sys.argv) < 3:
        sys.exit(-1)
    host = sys.argv[1]
    port = sys.argv[2]
    if len(host) == 0:
        sys.exit(-1)
    # Fetch the peer certificate (PEM) and parse it with pyOpenSSL.
    cert = ssl.get_server_certificate((host, port))
    x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, bytes(cert.encode()))
    # notAfter is an ASN.1 timestamp like 'YYYYMMDDhhmmssZ'; only the date
    # part is parsed, so the result is accurate to within one day.
    expire_timestamp = x509.get_notAfter().decode()
    year = int(expire_timestamp[:4])
    month = int(expire_timestamp[4:6])
    day = int(expire_timestamp[6:8])
    expire = datetime.datetime(year, month, day)
    days_to_expire = expire - datetime.datetime.now()
    # The Zabbix item expects a non-negative count; clamp expired certs to 0.
    if days_to_expire.days >= 0:
        print(days_to_expire.days)
    else:
        print(0)
<file_sep>/ssl_expiration_check_sni.py
#!/usr/bin/python3
import ssl
import OpenSSL
import datetime
import sys
import socket
def get_cert(hostname, port):
    """Fetch the server certificate for *hostname*:*port* and return it as PEM text.

    The handshake sends *hostname* via SNI, so the correct certificate is
    returned even when several virtual hosts share one IP address.  The
    default context validates the certificate chain.
    """
    ctx = ssl.create_default_context()
    with socket.create_connection((hostname, port)) as raw_sock:
        with ctx.wrap_socket(raw_sock, server_hostname=hostname) as tls_sock:
            # Convert the binary DER certificate to textual PEM.
            return ssl.DER_cert_to_PEM_cert(tls_sock.getpeercert(True))
if __name__ == '__main__':
    # Usage: ssl_expiration_check_sni.py <host> <port>
    # SNI-aware variant; prints days until certificate expiry.
    if len(sys.argv) < 3:
        sys.exit(-1)
    host = sys.argv[1]
    port = sys.argv[2]
    if len(host) == 0:
        sys.exit(-1)
    cert = get_cert(host, port)
    x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, bytes(cert.encode()))
    # notAfter is an ASN.1 UTC timestamp like b'20301231235959Z'.  Parse the
    # full timestamp (previously only YYYYMMDD was used, yielding midnight)
    # and compare against UTC "now" to avoid timezone-dependent off-by-ones.
    expire_timestamp = x509.get_notAfter().decode()
    expire = datetime.datetime.strptime(expire_timestamp, '%Y%m%d%H%M%SZ')
    days_to_expire = expire - datetime.datetime.utcnow()
    if days_to_expire.days >= 0:
        print(days_to_expire.days)
    else:
        # Already expired: clamp to 0 for the Zabbix item.
        print(0)
<file_sep>/README.md
# Zabbix SSL certs expiration check template
The template discovers SSL-enabled ports on a host and, for each one, creates an item with the number of days until certificate expiry plus three triggers with Warning, Average and High severity.
# Dependencies
* python3
* pyopenssl
# Deploy
## Zabbix server host
* Install python3 and pip3 packages. For example (CentOS):
```
# yum install python34 python34-pip
```
N.B. python versions >= 3.7 are not supported.
* Install pyopenssl library:
```
pip3 install pyopenssl
```
* Place `ssl_ports_lld.py` and `ssl_expiration_check.py` in Zabbix's external scripts folder.
To find the folder you can run
```
# grep 'ExternalScripts=' /etc/zabbix/zabbix_server.conf
```
## Zabbix web interface
* Navigate to Configuration → Templates → Import, choose the `Template SSL certs expiration check.xml` file in the import field, then press the `Import` button.
* Navigate to Configuration → Hosts → your_host → Templates and link the template.
# Settings
Change the ssl_ports list in `ssl_ports_lld.py` to reflect your services. By default the script checks ports 443, 587, 636, 993, 995 and 8888.
## SNI support
If you need SNI support (multiple hostnames on one IP), replace the file ssl_expiration_check.py with ssl_expiration_check_sni.py
NOTE! This change will break retrieval of untrusted certificates (self-signed, expired, etc.).
| d4e06fb61fd9770cbbb419b496663eb52fabb52d | [
"Markdown",
"Python"
] | 4 | Python | zeus911/zabbix_ssl_template | 4e9199d8c15d9594b4d8a8346d241a63f6220e8d | f8cb0f5e37af2d67b1b33634ff05c49529629455 |
refs/heads/master | <repo_name>jeongyeon1111/shortKnofe001<file_sep>/src/main/java/com/kh/shortKnife/newFolder/Run.java
package com.kh.shortKnife.newFolder;
import org.springframework.beans.factory.annotation.Autowired;
public class Run {

    /**
     * Ad-hoc manual test of the User01 value object: construct a user,
     * print its state, mutate the name, and print again.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // NOTE(review): the string literals below look encoding-corrupted
        // (likely a Korean CP949/EUC-KR source decoded as Latin-1) — confirm
        // the file's original charset before touching them.
        User01 user = new User01("001","¾ÆÁܸ¶");
        System.out.println(user);           // relies on User01.toString()
        System.out.println(user.getId());
        System.out.println(user.getName());
        user.setName("¿¹»Ú´Ï");
        System.out.println(user);
        System.out.println(user.getId());
        System.out.println(user.getName());
    }
}
| 54304536897e1f79138ac513e1b20fcec64dbd80 | [
"Java"
] | 1 | Java | jeongyeon1111/shortKnofe001 | bc48c8956fe0623b991efa4aaa8fce9633b9587c | 8ae49148554ce7b141ae3c5194aa3dddbf37498c |
refs/heads/main | <file_sep>
## Throttle me
## Control the pulse
## Screen
h/w
https://www.winstar.com.tw/products/oled-module/graphic-oled-display/4-pin-oled.html
Font to bitmap
1. Install Fontforge
2. Load Font (any TTF)
3. Double-click a digit
4. Select menu : 'Element' -> 'Bitmap Strikes Available'
    - select pixel size
62 => (22/36)
70 => (32/52)
5. Select menu: 'View' -> '70 Pixel bitmap'
6. Double-click a digit
- select 'file' -> 'export'
'format' 'C FontForge'
7. Copy the data[] inside the empty array below (replace <data>)
```
x = [<data>]
console.log('b"' + x.map(x => '\\x'+ ("00" + (255-x).toString(16)).substr(-2)).join('') + '"')
```
8. Take the output, and Add line in Python (replace <data>)
```
char0 = bytearray(<data>)
<file_sep>from machine import Pin
import utime
import screen
# GP25 = on-board LED, used as a visible indicator while the motor pulses.
led_onboard = Pin(25, Pin.OUT)
led_onboard.value(0)
# GP14 = wheel-rotation sensor input (active low, internal pull-up).
speed_sensor = Pin(14, Pin.IN, Pin.PULL_UP)
# GP17 = motor/throttle output.
motor = Pin(17, Pin.OUT)
motor.value(0)
# ( 2.195 meters : circumference of a 27.5" x 2.25" wheel )
KM_PER_REVOLUTION = 2.195 / 1000
MS_IN_AN_HOUR = 3600000
# Speed cap enforced by the throttle loop below.
MAX_SPEED_KMH = 20
# Minimum milliseconds per wheel revolution at the capped speed (20 km/h).
MAX_SPEED_DURATION = int(
    MS_IN_AN_HOUR / (MAX_SPEED_KMH / KM_PER_REVOLUTION))
print("MAX_SPEED_KMH=", MAX_SPEED_KMH,
      " MAX_SPEED_DURATION=", MAX_SPEED_DURATION)
# Written by the IRQ handler: timestamp of the last accepted rotation and
# the interval between the last two rotations (ms).
irq_last_rotation_time = 0
irq_last_rotation_ms = 0
def sensor_handler(sensor):
    """IRQ callback for the wheel sensor: record the time of this rotation.

    Debounce: falling edges arriving within 80 ms of the previously accepted
    one (the original author notes that would mean ~100 km/h) are treated as
    switch bounce and ignored.
    """
    global irq_last_rotation_time
    global irq_last_rotation_ms
    now = utime.ticks_ms()
    since_last = utime.ticks_diff(now, irq_last_rotation_time)
    if since_last > 80:
        irq_last_rotation_time = now
        irq_last_rotation_ms = since_last
# Fire sensor_handler on every falling edge of the wheel sensor.
speed_sensor.irq(trigger=Pin.IRQ_FALLING, handler=sensor_handler)
# Throttle-loop state: when the last motor pulse started/finished (ms ticks).
last_loop_pulce_started = 0
last_loop_pulce_finished = 0
# Snapshots used for display/debugging.
irq_time = 0
irq_speed = 0
# OLED status display (driver in the local `screen` module); run its test
# routine at boot.
oled = screen.SCREEN()
oled.test()
# Main throttle loop: whenever the wheel sensor has registered a new
# rotation, emit one motor pulse — but never more often than
# MAX_SPEED_DURATION ms apart, which caps the assisted speed at
# MAX_SPEED_KMH.  Pulse length scales with the rotation period (max 0.5 s).
while True:
    loop_time = utime.ticks_ms()
    # A rotation is "new" if the IRQ timestamp is later than the end of the
    # last pulse we sent.
    last_processed_diff = utime.ticks_diff(irq_last_rotation_time, last_loop_pulce_finished)
    new_rotation_since_last_pulce_finished = last_processed_diff > 0
    irq_time = irq_last_rotation_time  # snapshot (currently unused below)
    if (new_rotation_since_last_pulce_finished):
        duration_since_last_pulce = utime.ticks_diff(loop_time, last_loop_pulce_started)
        delay = 0
        if (duration_since_last_pulce < MAX_SPEED_DURATION):
            # Too fast! Delay before sending the pulce
            delay = MAX_SPEED_DURATION - duration_since_last_pulce
            utime.sleep_ms(delay)
        # Pulse length depends on the speed: 5% of the period, max 0.5 s.
        on_for = min(500, int((duration_since_last_pulce+delay) * 0.05))
        # Speed derived from pulse spacing vs. speed from the raw sensor IRQ.
        speed = KM_PER_REVOLUTION * (MS_IN_AN_HOUR / (duration_since_last_pulce+delay))
        irq_speed = KM_PER_REVOLUTION * (MS_IN_AN_HOUR/irq_last_rotation_ms)
        oled.display(speed, irq_speed)
        # NOTE(review): plain + on tick values — utime.ticks_add() would be
        # safer against tick-counter wraparound; confirm.
        last_loop_pulce_started = loop_time + delay
        led_onboard.value(1)
        motor.value(1)
        utime.sleep_ms(on_for)
    else:
        utime.sleep_ms(10)
    if (new_rotation_since_last_pulce_finished):
        last_loop_pulce_finished = utime.ticks_ms()
        led_onboard.value(0)
        motor.value(0)
| 6e5f1cc90c2c6e5e4d0d7530646d2a2cb0be4801 | [
"Markdown",
"Python"
] | 2 | Markdown | khowling/pico-throttle | a9ae91941dcf9e0ffed14977d343b91c6b34273d | 895af867c153e4caaf05c6e33b845cace2e4439b |
refs/heads/master | <repo_name>changyue0805/zwork<file_sep>/zWordSerbice/www/js/app/myfn.js
// AMD module: base-URL helper shared by the data modules — host and port of
// the backend API server (see serbice.js, which listens on 9000).
define({
    baseUrl:'http://10.0.161.56',
    port:'9000',
    getBaseURL:function(){
        // e.g. "http://10.0.161.56:9000"
        return this.baseUrl +":"+this.port;
    }
});
<file_sep>/zWordSerbice/data/serbice.js
var express =require('express');
var app =express();
var fs =require("fs");
var http=require('http');
// Module-level caches for the raw JSON buffers served by the routes below.
var navdata =null;
var menudata =null;
var bannerdata =null;
var cityWalk=null;
// Load the four data files one after another (nested async callbacks);
// paths must stay relative to the process working directory.  The server
// only starts listening once all files are in memory.
fs.readFile('nav.json',function(err,data){
    if(err)
        console.log(err);
    navdata=data;
    fs.readFile('menu.json',function(err1,data1){
        if(err1)
            console.log(err1);
        menudata=data1;
        fs.readFile('banner.json',function(err2,data2){
            if(err2)
                console.log(err2);
            bannerdata=data2;
            fs.readFile('cityWalkList.json',function(err3,data3){
                if(err3)
                    console.log(err3);
                cityWalk=data3;
                app.listen(9000); // listen port
                console.log("启动");
            })
        })
    })
})
// Serve the static front-end from ./www.
app.use(express.static('www'));
// Permissive CORS headers on every route so the API can be called
// cross-origin during development.
app.all('/*',function(req,res,next){
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "X-Requested-With");
    res.header("Access-Control-Allow-Methods","PUT,POST,GET,DELETE,OPTIONS");
    next();
})
// Routes answering the front-end's data requests.
app.get('/znav',function(req,res,next){
    // Serve the cached buffer as JSON.
    res.header('Content-Type','application/json');
    res.send(navdata);
})
app.get('/zmenu',function(req,res,next){
    // Serve the cached buffer as JSON.
    res.header('Content-Type','application/json');
    res.send(menudata);
})
app.get('/zbanner',function(req,res,next){
    // Serve the cached buffer as JSON.
    res.header('Content-Type','application/json');
    res.send(bannerdata);
})
// Search proxy: forwards the keyword to qyer.com and relays the response.
app.get('/input',function(req,res,next){
    var keyword=req.query['value'];
    http.get('http://z.qyer.com/qcross/home/ajax?action=sitesearch&keyword='+keyword,function(httpRes){
        var buffer=[];
        httpRes.on('data',function(chunk){
            /*
             * `chunk` is a Node Buffer.  Concatenating Buffers with `+`
             * coerces them to strings and can garble multi-byte characters,
             * so the chunks are collected in an array and joined with
             * Buffer.concat() once the response has ended.
             */
            buffer.push(chunk);
        })
        httpRes.on('end',function(){
            // Reassemble the chunks and relay them to the client.
            var resData = Buffer.concat(buffer);
            // console.log(resData);
            res.send(resData);
        })
    })
})
app.get('/zcity',function(req,res,next){
    // Serve the cached buffer as JSON.
    res.header('Content-Type','application/json');
    res.send(cityWalk);
})
<file_sep>/zWordSerbice/www/js/app/index4tab.js
// AMD module: builds the left-hand category tabs plus their hover fly-out
// panels from the /zmenu JSON feed and appends them to `root`.
define(['jquery','myutil','app/myfn'],function($,x,url){
    function getTab(root){
        var xhr =x();
        xhr.open('get',url.getBaseURL()+'/zmenu');
        xhr.send(null);
        xhr.onreadystatechange=function(e){
            if(xhr.readyState===4){
                var tabs =JSON.parse(xhr.responseText);
                tabs.forEach(function(elem,index){
                    // Left side of the tab: icon, title and main-city line.
                    var li =$("<li class='leftli'></li>");
                    var div =$("<div class='somDiv'></div>");
                    var img =$("<img class='leftImg'>");
                    var h3 =$("<h3></h3>");
                    var p =$("<p> </p>");
                    var span=$('<span>></span>')
                    img.attr('src',elem['imgMenu']);
                    h3.html(elem['title']);
                    p.html(elem['mainCity']);
                    li.append(img);
                    li.append(div);
                    div.before(span);
                    div.append(h3);
                    div.append(p);
                    $(root).append(li);
                    // Right side: the fly-out panel shown on hover.
                    var bigDiv=$("<div class='div_right'></div>");
                    li.append(bigDiv);
                    // Show/hide the fly-out and highlight on enter/leave.
                    function handlerLfet(e){
                        if(e.type=='mouseenter'){
                            $(this).children('.div_right').css('display','block');
                            $(this).children('.div_right').fadeIn(100)
                            $(this).css('background','white');
                        }else if(e.type=='mouseleave'){
                            $(this).children('.div_right').css('display','none');
                            $(this).css('background','');
                        }
                    }
                    li.on("mouseenter mouseleave",handlerLfet);
                    // Fly-out content: two columns (Div1 left, Div2 right).
                    var tabDiv1=$("<div class='Div1'></div>");
                    var tabDiv2=$("<div class='Div2'></div>");
                    // Heading of the first city group.
                    var firsth2 =$("<h2 class='tabRh1'></h2>");
                    firsth2.html(elem['moreCity'][0]['cityName']);
                    // Entries of the first city group (left column).
                    var rightul =$("<ul class='tab_ul'></ul>");
                    elem['moreCity'][0]['items'].forEach(function(elem1,index1){
                        var rightLi1 =$('<li class="rigthli1"></li>');
                        var righta=$('<a class="righta"></a>');
                        // console.log(elem1)
                        // Entries are either image URLs (.jpg) or plain text.
                        // NOTE(review): only items[0] is inspected, so a group
                        // is treated as all-images or all-text — confirm the
                        // feed guarantees homogeneous groups.
                        if(elem['moreCity'][0]['items'][0].indexOf('.jpg') > -1){
                            // console.log(elem['moreCity'][0]['items']);
                            var rightimg=$('<img class="rightimg1">');
                            rightimg.css('width','60px').css('height','60px');
                            rightimg.attr("src",elem1);
                            righta.append(rightimg);
                        }else{
                            righta.html(elem1);
                        }
                        righta.append(rightimg);
                        rightul.append(rightLi1);
                        rightLi1.append(righta);
                    });
                    bigDiv.append(tabDiv1);
                    tabDiv1.append(firsth2);
                    tabDiv1.append(rightul);
                    bigDiv.append(tabDiv2);
                    // Second city group, if present.
                    if(elem['moreCity'].length>1){
                        // With exactly three groups the second stays in the
                        // left column; otherwise it goes to the right one.
                        if(elem['moreCity'].length==3){
                            var secondh2=$('<h2 class="tabRh1"></h2>');
                            secondh2.html(elem['moreCity'][1]['cityName']);
                            tabDiv1.append(secondh2);
                            var rightul2 =$("<ul class='tab_ul2'></ul>");
                            elem['moreCity'][1]['items'].forEach(function(elem2,index2){
                                var rightLi2 =$('<li class="rigthli2"></li>');
                                var righta2=$('<a class="righta2"></a>');
                                righta2.html(elem2);
                                rightul2.append(rightLi2);
                                rightLi2.append(righta2);
                            })
                            tabDiv1.append(rightul2);
                        }else{
                            var secondh2=$('<h2 class="tabRh1"></h2>');
                            secondh2.html(elem['moreCity'][1]['cityName']);
                            tabDiv2.append(secondh2);
                            var rightul2 =$("<ul class='tab_ul2'></ul>");
                            elem['moreCity'][1]['items'].forEach(function(elem2,index2){
                                var rightLi2 =$('<li class="rigthli2"></li>');
                                var righta2=$('<a class="righta2"></a>');
                                righta2.html(elem2);
                                rightul2.append(rightLi2);
                                rightLi2.append(righta2);
                            })
                            tabDiv2.append(rightul2);
                        }
                    }
                    // Third city group (right column).
                    if(elem['moreCity'].length>2){
                        var lasth2=$('<h2 class="tabRh1"></h2>');
                        lasth2.html(elem['moreCity'][2]['cityName']);
                        var rightul3 =$("<ul class='tab_ul2'></ul>");
                        elem['moreCity'][2]['items'].forEach(function(elem3,index3){
                            var rightLi3 =$('<li class="rigthli2"></li>');
                            var righta3=$('<a class="righta2"></a>');
                            // NOTE(review): heading/list are re-appended on
                            // every iteration — harmless in jQuery (append
                            // moves the node) but confirm it is intended.
                            tabDiv2.append(lasth2);
                            righta3.html(elem3);
                            rightul3.append(rightLi3);
                            rightLi3.append(righta3);
                            tabDiv2.append(rightul3);
                        });
                    }
                    // Promo image at the bottom of the right column.
                    var lastImg=$('<img class="lastImg" />');
                    lastImg.attr("src",elem["moreCityImg"]);
                    lastImg.css("width","278px").css('font-size','0px').css('margin','20px 0px 0px 0px');
                    tabDiv2.append(lastImg);
                })
            }
        }
    }
    return getTab;
})
<file_sep>/zWordSerbice/www/js/app/index4banner.js
// AMD module: fetches the banner list and runs a simple rotating carousel
// on the #banner element.
define(['jquery','myutil','app/myfn','app/myfn1'],function($,x,url,style){
    function getBanner(root){
        var xhr=x();
        xhr.open('get',url.getBaseURL()+'/zbanner');
        xhr.send(null);
        xhr.onreadystatechange=function(e){
            if(xhr.readyState===4){
                var banners =JSON.parse(xhr.responseText);
                var arr1 = [];  // image URLs, in banner order
                var arr2=[];    // click-through links, same order
                var num =0;     // index of the banner currently shown
                banners.forEach(function(elem,index){
                    var imgUrls =elem['imgUrl'];
                    arr1[index] = imgUrls;
                    var hrefs = elem['href'];
                    arr2[index]=hrefs;
                    // Each iteration overwrites the previous one, so the
                    // carousel initially shows the last banner.
                    $('.banImg').attr('src',imgUrls);
                    $('.banImg').attr('href',hrefs);
                    $('.banImg').css(style);
                });
                setInterval(function(){
                    num++;
                    // Wrap around based on the actual number of banners
                    // (previously hard-coded as `num > 3`, i.e. exactly 4).
                    if(num>=arr1.length){
                        num=0;
                    }
                    $('.banImg').attr('src',arr1[num])
                    $('#banner a').attr('href',arr2[num])
                },2000)
            }
        }
    }
    return getBanner;
});
// AMD module: remote login-status endpoint on qyer.com.  The port component
// is deliberately commented out — the URL is used as-is.
define({
    baseUrl:'http://z.qyer.com/qcross/home/ajax?action=loginstatus',
    // port:'9000',
    getBaseURL:function(){
        return this.baseUrl /*+":"+this.port*/;
    }
});
<file_sep>/zWordSerbice/www/js/main.js
requirejs.config({
    baseUrl:'js/lib', // root path against which module ids are resolved (shared libs)
    paths:{
        'app':'../app', // application modules live one level up
        'jquery':'jquery-3.1.1',
        'myutil':'../app/myUtil'
    },
    shim:{
        // myUtil is a plain script; expose its global createXHR as the module value.
        'myutil':{
            exports:'createXHR'
        }
    }
});
// Application entry point: each index4* module returns a render function
// that fills its part of the page via AJAX.
define(['app/index4nav','app/index4banner','app/index4tab','app/index4header','app/index5main'],function(nav,banner,tab,header,main){
    var root1 =document.querySelector("#nav ul");
    var root2 =document.querySelector("#banner");
    var root3=document.querySelector(".tab");
    // NOTE(review): the header module is loaded and root4 selected, but
    // header() is never invoked — confirm whether that is intentional.
    var root4=document.querySelector("#header");
    var root5=document.querySelector('#mainCtiy');
    nav(root1);
    banner(root2);
    tab(root3);
    main(root5);
})
# zwork
最世界的作业
| 19159fd778d5eb758ca40e267e08472aba6b5f2b | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | changyue0805/zwork | b8d364a9a5f0b1f1640630efbae4134f1e9aa731 | a4ac1a1ca97e579ec7f5686b5ba88e3446905438 |
refs/heads/master | <file_sep># OilWellRegressions
Experimenting with R, building regressions of oil well data to analyse which costs of oil well production have fallen the most in the last few years, using AFE data.
<file_sep># Import the Time Series library
import statsmodels.tsa.stattools as ts
# Import Datetime and the Pandas DataReader
# NOTE(review): pandas.io.data was removed from pandas (it moved to the
# separate pandas-datareader package), and neither `datetime` nor
# `DataReader` is actually used below.
from datetime import datetime
from pandas.io.data import DataReader
# Download the Google OHLCV data from 1/1/2000 to 1/1/2013
# (in reality `goog` below is just a hard-coded toy series, nothing is
# downloaded).
goog = [5,8,2,4,7,2,4,6,8,4,5]
# Output the results of the Augmented Dickey-Fuller test for Google
# with a lag order value of 1
ts.adfuller(goog, 1)<file_sep>import xlrd
book = xlrd.open_workbook('deviancedata.xlsx')
sheet = book.sheet_by_name('Sheet3')
# Flatten the sheet in column-major order: all rows of column 0, then all
# rows of column 1, and so on.
# NOTE(review): xlrd 2.x dropped .xlsx support — this needs xlrd < 2.0 (or a
# switch to openpyxl) on current installs; confirm the pinned version.
data = [sheet.cell_value(r, c) for c in range(sheet.ncols) for r in range(sheet.nrows)]
# Profit !
print(data)<file_sep>as.Date('1/15/2001',format='%m/%d/%Y')<file_sep>library(lattice)
# Snout-vent length vs. weight (both log-transformed) for 15 alligators.
alligator = data.frame(
    lnLength = c(3.87, 3.61, 4.33, 3.43, 3.81, 3.83, 3.46, 3.76,
                 3.50, 3.58, 4.19, 3.78, 3.71, 3.73, 3.78),
    lnWeight = c(4.87, 3.93, 6.46, 3.33, 4.38, 4.70, 3.50, 4.50,
                 3.58, 3.64, 5.90, 4.43, 4.38, 4.42, 4.25)
)
# Scatterplot of the raw (log-scale) data.
xyplot(lnWeight ~ lnLength, data = alligator,
       xlab = "Snout vent length (inches) on log scale",
       ylab = "Weight (pounds) on log scale",
       main = "Alligators in Central Florida"
)
# Simple linear regression of log-weight on log-length.
alli.mod1 = lm(lnWeight ~ lnLength, data = alligator)
summary(alli.mod1)
# Residuals vs. fitted values, with a grid and a reference line at zero.
xyplot(resid(alli.mod1) ~ fitted(alli.mod1),
       xlab = "Fitted Values",
       ylab = "Residuals",
       main = "Residual Diagnostic Plot",
       panel = function(x, y, ...)
       {
           panel.grid(h = -1, v = -1)
           panel.abline(h = 0)
           panel.xyplot(x, y, ...)
       }
)
qqmath( ~ resid(alli.mod1),
xlab = "Theoretical Quantiles",
ylab = "Residuals"
)<file_sep>set.seed(2)
n <- 1000
# Symmetric simple random walk: cumulative sum of n fair ±1 steps.
x <- cumsum(sample(c(-1, 1), n, TRUE))
# The lnLength/lnWeight column names are reused from the alligator example
# so the same plotting code applies; here they are just step index and walk
# value.
randomwalktest = data.frame(
    lnLength = c(1:n),
    lnWeight = c(x)
)
xyplot(lnWeight ~ lnLength, data = randomwalktest,
xlab = "Iterations",
ylab = "Random Walk Output",
main = "Random Walk Test"
)<file_sep>library(lattice)
Data <- read.csv("afes.csv", header=TRUE)
str(Data)
# Normalize tangible dry-well cost to dollars per 1000 feet of well length.
Data$new <- Data$TangibleDryWell / (Data$Length / 1000)
# Parse dates like "15-January-2017" into POSIXct timestamps.
Data$ProductDates <- as.POSIXct(strptime(Data$ProductDates,"%d-%B-%Y"))
# NOTE(review): regmodel is fitted but never referenced afterwards — the
# smoothed trend in the plot below comes from lattice's type="smooth", not
# from this lm fit.
regmodel <- lm(new ~ ProductDates, data=Data)
xyplot(new ~ ProductDates, data = Data[order(Data$ProductDates),],
xlab = "Oil Well AFE Submission Dates",
ylab = "Costs ($/1000')",
main = "Length-Adjusted Tangible Dry Well Costs (2012-2017)",
grid = TRUE,
type = c("p", "smooth"), col.line = "darkblue", lwd = 3
)<file_sep>install.packages("caTools") # install external package
library(caTools) # external package providing the write.gif function
# Blue-to-red "jet" palette used to colour the animation frames.
jet.colors <- colorRampPalette(c("#00007F", "blue", "#007FFF", "cyan", "#7FFF7F",
"yellow", "#FF7F00", "red", "#7F0000"))
dx <- 400 # frame width in pixels
dy <- 400 # frame height in pixels
# Complex grid covering the classic Mandelbrot viewport [-2.2,1.0] x [-1.2,1.2].
C <- complex( real=rep(seq(-2.2, 1.0, length.out=dx), each=dy ),
imag=rep(seq(-1.2, 1.2, length.out=dy), dx ) )
C <- matrix(C,dy,dx) # reshape as square matrix of complex numbers
Z <- 0 # initialize Z to zero
X <- array(0, c(dy,dx,20)) # output 3D array: one frame per iteration
for (k in 1:20) { # 20 iterations of the Mandelbrot recurrence
Z <- Z^2+C # the Mandelbrot iteration z -> z^2 + c
X[,,k] <- exp(-abs(Z)) # map magnitude into (0,1] for colouring
}
write.gif(X, "Mandelbrot.gif", col=jet.colors, delay=900) | 94f6a8a280e2891f3ff4036a2bb6206a82946ba4 | [
"Markdown",
"Python",
"R"
] | 8 | Markdown | thushanp/OilWellRegressions-and-Bitcoin-Arbitrage | 883bad57a610b8d4836d375abbd723dfc01d2741 | e45b16914f7469c150955d3218bef9c1452c9c0c |
refs/heads/master | <file_sep>package main
import (
"fmt"
// "strconv"
"os"
// "os/exec"
"syscall"
"path/filepath"
"io"
"io/ioutil"
"bytes"
"encoding/binary"
"unsafe"
)
// Magic footer bytes.
var magicbits [4]byte = [4]byte{byte(200), byte(76), byte(112), byte(0)}
// main implements a two-stage, self-relocating binary:
//
//	stage 1 (no arguments): copy this executable to a temp file and re-exec
//	the copy with argv[0]="run" and argv[1]=<path of this binary>;
//	stage 2 (the re-exec'd copy): open the original file read-write and, if
//	it does not yet carry the magic footer, append one (Magificate).
func main() {
	// Initial behaviour, real file with possible data attached.
	if x := len(os.Args); x == 1 {
		// Get an absolute path
		path_to_this := GetAbsPath()
		// Open ourselves.
		this_file, err := os.Open(path_to_this)
		if err != nil {
			panic(fmt.Sprintln("Could not open self:\n\t", err))
		}
		defer this_file.Close()
		// Transfer binary to temporary file and point it at this one
		subvert_the_pager(this_file)
		// Die as quickly as possible.
		os.Exit(0)
	} else if os.Args[0] != "run" {
		// Run flag parsing (branch intentionally empty for now)
	}
	// We are the temporary file and must now proceed to open the original.
	// NOTE(review): a normal invocation with extra arguments also reaches
	// this point and indexes os.Args[1] as a file path — confirm intended.
	fmt.Println("Args: ", os.Args)
	orig_path := os.Args[1]
	// Now open ourselves
	orig_file, err := os.OpenFile(orig_path, os.O_RDWR, os.ModePerm)
	if err != nil {
		panic(fmt.Sprintln("Could not open file: ", err))
	}
	defer orig_file.Close()
	if !CheckFooter(orig_file){
		fmt.Println("Not magificated, attempting.")
		Magificate(orig_file)
		fmt.Println("Successfully magificated: ", orig_path)
	} else {
		fmt.Println("original is magificated")
	}
}
// GetAbsPath returns an absolute path to the running executable.  It is
// only reliable while the working directory is unchanged since launch.
func GetAbsPath() (estpath string) {
	argv0 := os.Args[0]
	if filepath.IsAbs(argv0) {
		return argv0
	}
	cwd, _ := os.Getwd()
	return filepath.Clean(filepath.Join(cwd, argv0))
}
// subvert_the_pager copies the first Bin_last_byte bytes of f (the binary
// itself, excluding any appended data) into an executable temp file and
// replaces the current process with that copy via exec, passing
// argv[0]="run" and argv[1]=<original path>.  It only returns if exec fails.
func subvert_the_pager(f *os.File) {
	// Hold an end address which defines the last byte of the binary
	var Bin_last_byte int64 = 0
	var err error = nil
	// NOTE(review): this err is never checked; FindLastBinByte currently
	// always returns nil (failures panic), but add a check if that changes.
	Bin_last_byte, err = FindLastBinByte(f)
	// Create temporary file
	// Get name of bin for prefix
	name := filepath.Base(f.Name())
	tmpfile, err := ioutil.TempFile("", name)
	if err != nil {
		panic(fmt.Sprintln("Could not create temp file:\n\t", err))
	}
	// Now copy the binary into the temporary file.
	f.Seek(0, os.SEEK_SET)
	fmt.Println("Attempting copy of ", Bin_last_byte, " bytes")
	_, err = io.CopyN(tmpfile, f, Bin_last_byte)
	if err != nil {
		panic(fmt.Sprintln("Problem while copying binary:\n\t", err))
	}
	// Save the name of the temp file.
	tmpname := tmpfile.Name()
	// Change the file permissions on the temp file so we can execute it.
	os.Chmod(tmpname, 0700)
	// Sync and then close the temp file..
	tmpfile.Sync()
	tmpfile.Close()
	// Replace this process image with the temp copy; on success this call
	// never returns.
	syscall.Exec(tmpname, []string{"run", f.Name()}, os.Environ())
}
// FuuFoot is the fixed trailer appended to a magicked file, serialized
// little-endian in field order: Bin_end (8 bytes), Version (1 byte),
// Magic (4 bytes) — 13 bytes on disk (see the Seek(-13, ...) reader).
type FuuFoot struct {
	// In the order which it would appear at the end of the file
	Bin_end int64
	// Version byte
	Version byte
	// Magicbytes
	Magic [4]byte
}

// FuuFootSize is the in-memory struct size; unsafe.Sizeof may include
// padding, so this is NOT necessarily the 13-byte on-disk footer size.
var FuuFootSize = unsafe.Sizeof(FuuFoot{})
// GenFuuFoot1 builds a version-1 footer recording where the binary part of
// *file* ends.
func GenFuuFoot1(file *FuuFile) *FuuFoot {
	return &FuuFoot{file.Bin_last_byte, byte(1), magicbits}
}
// Output serializes the footer as little-endian bytes in field order:
// 8-byte Bin_end, 1-byte Version, 4 magic bytes (13 bytes total).
func (rec *FuuFoot) Output() (b []byte, err error) {
	outbuff := new(bytes.Buffer)
	// encoding/binary writes the fixed-size struct field by field, in
	// declaration order and without padding — the same layout the previous
	// hand-rolled per-field loop produced, and the same layout that
	// FindLastBinByte reads back with binary.Read.
	if err = binary.Write(outbuff, binary.LittleEndian, rec); err != nil {
		return nil, err
	}
	return outbuff.Bytes(), nil
}
// FuuFile wraps an *os.File that may carry appended data plus a FuuFoot
// trailer after the binary section.
type FuuFile struct {
	*os.File
	// magicked reports whether a footer has already been written.
	magicked bool
	// Size of binary at start (also the offset to Fuuoptions once magicked)
	Bin_last_byte int64
	// The current end of file as an address (can change as needed)
	// TODO: Reclaim size as needed
	End_of_file int64
	// End of data, should be End_of_file-footersize
	End_of_data int64
}
// TODO fuufile init
// func InitFuuFile(f *os.File) {}
// ReFoot (re)writes the footer at the end of the file.  For a file that is
// not yet magicked the footer is appended at End_of_file and, on success,
// the magicked flag is set via the deferred closure; for an already
// magicked file the existing footer at End_of_data is overwritten in place.
func (rec *FuuFile) ReFoot() (err error) {
	// Start from scratch
	footbytes, err := GenFuuFoot1(rec).Output()
	if err != nil {
		return
	}
	if rec.magicked {
		// Seek to the beginning of the footer
		rec.Seek(rec.End_of_data, os.SEEK_SET)
	} else {
		rec.Seek(rec.End_of_file, os.SEEK_SET)
		// Mark the file as magicked only if the write below succeeds; the
		// closure reads the named return value at defer-run time.
		defer func() {
			if err == nil {
				rec.magicked = true
			}
		}()
	}
	fmt.Println("Footbytes = \n\t", footbytes)
	_, err = rec.Write(footbytes)
	if err != nil {
		return
	}
	err = nil
	return
}
// CheckFooter reports whether the file ends with the 4 magic footer bytes.
// Files too short to carry the magic yield false instead of a bogus read.
func CheckFooter(file *os.File) bool {
	// Seeking 4 bytes back from the end fails (negative offset) for files
	// shorter than the magic; such files cannot be FuuFiles.
	if _, err := file.Seek(-4, os.SEEK_END); err != nil {
		return false
	}
	magicbuff := make([]byte, 4)
	// io.ReadFull guards against the short reads a bare Read would
	// silently accept.
	if _, err := io.ReadFull(file, magicbuff); err != nil {
		panic("Unable to read magic bits")
	}
	return bytes.Equal(magicbuff, magicbits[:])
}
// FindLastBinByte returns the offset of the last byte of the binary part of
// f: for a plain file that is simply the file size; for a magicked file it
// is the Bin_end value stored in the 13-byte footer.  The error result is
// currently always nil (failures panic) but is kept for the call sites.
func FindLastBinByte(f *os.File) (int64, error) {
	if !CheckFooter(f) {
		// No footer: the whole file is binary, so its size is the answer.
		b, _ := f.Seek(0, os.SEEK_END)
		return b, nil
	}
	// We have a FuuFile: parse the footer from the end of the file.
	footer := &FuuFoot{}
	f.Seek(-13, os.SEEK_END)
	// Check the read error before using footer; previously the partially
	// filled struct was serialized and printed before the error check.
	if err := binary.Read(f, binary.LittleEndian, footer); err != nil {
		panic("Error reading footer")
	}
	footbytes, _ := footer.Output()
	fmt.Println("Found footer. footbytes=\n\t", footbytes)
	return footer.Bin_end, nil
}
// Magificate turns a plain file into a FuuFile by appending a footer that
// records the current end of file as the end of the binary section.
// Failures panic, so the returned err is always nil.
func Magificate(this_file *os.File) (f *FuuFile, err error) {
	// Determine the current end-of-file offset.
	end_file_offset, err := this_file.Seek(0, os.SEEK_END)
	if err != nil {
		panic("Problem seeking to end of file")
	}
	// Initialize the struct: binary end, end-of-file and end-of-data all
	// coincide before the footer is written.
	f = &FuuFile{this_file, false, end_file_offset, end_file_offset, end_file_offset}
	fmt.Println("Writing a footer with ", end_file_offset, " end file offset")
	if err := f.ReFoot(); err != nil {
		panic("Issues writing to file")
	}
	err = nil
	return
}
"Go"
] | 1 | Go | the-locksmith/write-to-bin-golang | dd9a819763a3e50992c6d85ff54b3f5320f78979 | bd9530910564cdf7082c8db2f2af682cec62e626 |
refs/heads/master | <repo_name>Joanna026/Charity-donation-web-app<file_sep>/src/main/java/pl/coderslab/charity/model/DTO/TokenDTO.java
package pl.coderslab.charity.model.DTO;
import lombok.Data;
import pl.coderslab.charity.model.entities.User;
import java.util.Date;
/**
 * DTO describing a user-bound token with an expiry date, as produced for the
 * e-mail activation flow (see UserService.saveUser / TokenService).
 */
@Data
public class TokenDTO {
    // Opaque token value (a random UUID string in the current flow).
    private String token;
    // User the token belongs to.
    private User user;
    // Moment after which the token should no longer be accepted.
    private Date expiryDate;
}
<file_sep>/src/test/java/pl/coderslab/charity/UserServiceTest.java
package pl.coderslab.charity;
import org.junit.Before;
import org.junit.Test;
import org.modelmapper.ModelMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;

import javax.persistence.EntityManager;

import pl.coderslab.charity.model.DTO.UserDTO;
import pl.coderslab.charity.model.entities.User;
import pl.coderslab.charity.model.repositories.RoleRepository;
import pl.coderslab.charity.model.repositories.UserRepository;
import pl.coderslab.charity.model.services.EmailService;
import pl.coderslab.charity.model.services.TokenService;
import pl.coderslab.charity.model.services.UserService;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link UserService} with all collaborators mocked.
 */
public class UserServiceTest {

    private static final Logger log = LoggerFactory.getLogger(UserServiceTest.class);

    private UserService userService;
    private UserRepository userRepository;
    private EntityManager entityManager;
    private BCryptPasswordEncoder passwordEncoder;
    private ModelMapper modelMapper;
    private RoleRepository roleRepository;
    private EmailService emailService;
    private TokenService tokenService;

    @Before
    public void setUp() {
        userRepository = mock(UserRepository.class);
        passwordEncoder = mock(BCryptPasswordEncoder.class);
        modelMapper = mock(ModelMapper.class);
        roleRepository = mock(RoleRepository.class);
        emailService = mock(EmailService.class);
        tokenService = mock(TokenService.class);
        // Previously this construction was commented out, so userService
        // stayed null and every test failed with a NullPointerException.
        userService = new UserService(userRepository, modelMapper, roleRepository,
                emailService, tokenService);
    }

    @Test
    public void findByUsernameTestExistingUser() {
        User user = new User();
        user.setUsername("user12");
        UserDTO dto = new UserDTO();
        dto.setUsername("user12");
        when(userRepository.findByUsername("user12")).thenReturn(user);
        // findByUsername() converts the entity through the (mocked) mapper,
        // which must be stubbed or it returns null.
        when(modelMapper.map(user, UserDTO.class)).thenReturn(dto);
        UserDTO resultUser = userService.findByUsername("user12");
        assertEquals("user12", resultUser.getUsername());
    }

    @Test
    public void saveTest() {
        UserDTO dto = new UserDTO();
        dto.setUsername("user12");
        dto.setEmail("u@uu");
        dto.setPassword("<PASSWORD>");
        User user = new User();
        user.setUsername("user12");
        // saveUser() maps the DTO to an entity before persisting it.
        when(modelMapper.map(dto, User.class)).thenReturn(user);
        userService.saveUser(dto);
        // The mapped entity must have been handed to the repository.
        verify(userRepository).save(user);
    }
}
<file_sep>/src/main/resources/application.properties
# --- JPA / datasource -------------------------------------------------------
# NOTE(review): datasource credentials are committed to the repository (the
# current values look like redaction placeholders) — move real secrets to
# environment variables or an externalized config before deploying.
spring.jpa.hibernate.ddl-auto=update
spring.datasource.url=jdbc:mysql://localhost:3306/charity_donation?serverTimezone=UTC
spring.datasource.username=root
spring.datasource.password=<PASSWORD>
spring.h2.console.enabled=true
spring.jpa.show-sql=true
spring.datasource.platform=org.hibernate.dialect.MySQL57Dialect
spring.jpa.properties.useUnicode=true
spring.jpa.properties.characterEncoding = utf-8
spring.jpa.properties.CharSet = utf-8
spring.jpa.properties.hibernate.format_sql=true
# --- Spring MVC (JSP views, fixed Polish locale) ----------------------------
spring.mvc.view.prefix=/WEB-INF/views/
spring.mvc.view.suffix=.jsp
spring.mvc.locale=pl_PL
spring.mvc.locale-resolver=fixed
# --- Logging ----------------------------------------------------------------
logging.level.org.springframework.web=DEBUG
logging.level.pl.coderslab.charity=debug
logging.level.org.hibernate=ERROR
# --- Security (fallback in-memory user, overridable via env vars) -----------
spring.security.user.name=${APPUSER:user}
spring.security.user.password=${AP<PASSWORD>:pass}
# --- Mail (Gmail SMTP, used for account-activation e-mails) -----------------
spring.mail.host=smtp.gmail.com
spring.mail.port=587
spring.mail.username=<EMAIL>
spring.mail.password=...
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.starttls.enable=true
spring.profiles.active=application<file_sep>/src/main/java/pl/coderslab/charity/model/services/UserService.java
package pl.coderslab.charity.model.services;
import org.modelmapper.ModelMapper;
import org.springframework.security.crypto.factory.PasswordEncoderFactories;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import pl.coderslab.charity.model.DTO.UserDTO;
import pl.coderslab.charity.model.entities.Role;
import pl.coderslab.charity.model.entities.User;
import pl.coderslab.charity.model.repositories.RoleRepository;
import pl.coderslab.charity.model.repositories.UserRepository;
import javax.transaction.Transactional;
import java.util.UUID;
@Service
@Transactional
public class UserService {

    private final UserRepository userRepository;
    private final ModelMapper modelMapper;
    private final PasswordEncoder passwordEncoder;
    private final RoleRepository roleRepository;
    private final EmailService emailService;
    private final TokenService tokenService;

    public UserService(UserRepository userRepository, ModelMapper modelMapper,
                       RoleRepository roleRepository, EmailService emailService, TokenService tokenService) {
        this.userRepository = userRepository;
        this.modelMapper = modelMapper;
        this.roleRepository = roleRepository;
        this.emailService = emailService;
        this.tokenService = tokenService;
        this.passwordEncoder = passwordEncoder();
    }

    /** Looks a user up by name and returns it mapped to a DTO. */
    public UserDTO findByUsername(String username) {
        User found = userRepository.findByUsername(username);
        return toDto(found);
    }

    /** Spring's delegating password encoder used for hashing passwords. */
    public PasswordEncoder passwordEncoder() {
        return PasswordEncoderFactories.createDelegatingPasswordEncoder();
    }

    /** Entity -> DTO mapping. */
    public UserDTO toDto(User user) {
        return modelMapper.map(user, UserDTO.class);
    }

    /** DTO -> entity mapping. */
    public User toEntity(UserDTO userDTO) {
        return modelMapper.map(userDTO, User.class);
    }

    /**
     * Registers a new user in a disabled state and e-mails an activation
     * link containing a freshly generated token.
     */
    public void saveUser(UserDTO userDTO) {
        User user = toEntity(userDTO);
        user.setPassword(passwordEncoder.encode(userDTO.getPassword()));
        user.setAuthority(roleRepository.findByAuthority("ROLE_USER"));
        user.setEnabled(false);
        userRepository.save(user);

        String token = UUID.randomUUID().toString();
        tokenService.createToken(user, token);
        // NOTE(review): the activation host is hard-coded to localhost:8080 —
        // consider moving it to configuration.
        emailService.sendSimpleMessage(userDTO.getEmail(), "Aktywacja konta",
                "Aby dokończyć proces rejestracji, kliknij w poniższy link: \n " +
                        "http://localhost:8080/activate?token=" + token);
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/entities/Institution.java
package pl.coderslab.charity.model.entities;
import lombok.Getter;
import lombok.Setter;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * JPA entity for a charity institution that can receive donations.
 */
@Entity
@Table(name="institutions")
@Getter @Setter
public class Institution extends BaseEntity {

    // Display name; uniqueness is enforced at the database level.
    @Column(nullable = false, unique = true)
    private String name;

    // Free-text description of the institution's goal and mission.
    private String description;

    // NOTE(review): presumably a soft-delete flag — confirm against usage.
    private Boolean archived;

    @Override
    public String toString() {
        // Produces exactly the same text as the previous concatenation;
        // `archived` is not included — confirm that is intentional.
        StringBuilder text = new StringBuilder("Institution{");
        text.append(super.toString());
        text.append(", name='").append(name).append('\'');
        text.append(", description='").append(description).append('\'');
        text.append('}');
        return text.toString();
    }
}
<file_sep>/src/main/resources/messages.properties
# UI labels and placeholders for the admin institution screens.
# NOTE(review): several values still contain scaffold text ("Create
# property", "Spring Boot Samplessss") mixed with the Polish translations —
# replace them with real strings before release.
addInstitution.message=Create property
app.titles= Spring Boot Samplessss
delete.message=Usuń
edit-institution.message=Zapisz zmiany
edit.message=Edytuj
institutionDescription.placeholder=Cel i misja fundacji
institutionName.placeholder=Create property<file_sep>/src/main/java/pl/coderslab/charity/controllers/EditInstitutionFormController.java
package pl.coderslab.charity.controllers;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import pl.coderslab.charity.model.DTO.InstitutionDTO;
import pl.coderslab.charity.model.services.InstitutionService;
/**
 * Admin controller for editing an existing institution.
 */
@Controller
@RequestMapping("/admin")
public class EditInstitutionFormController {

    private final InstitutionService institutionService;

    public EditInstitutionFormController(InstitutionService institutionService) {
        this.institutionService = institutionService;
    }

    /** Shows the edit form pre-filled with the institution's current data. */
    @GetMapping("/institution/edit")
    public String prepareInstitutionFormToEdit(Model model, @RequestParam Long id) {
        model.addAttribute("institutionToEdit", institutionService.getById(id));
        return "editInstitutionForm";
    }

    /** Persists the submitted changes and returns to the institution list. */
    @PostMapping("/institution/edit")
    public String processEditInstitutionForm(InstitutionDTO institutionToEdit) {
        institutionService.update(institutionToEdit);
        return "redirect:/admin/institutions";
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/DTO/UserDTO.java
package pl.coderslab.charity.model.DTO;
import lombok.Data;
import pl.coderslab.charity.model.entities.Role;
import javax.validation.constraints.NotBlank;
// Data-transfer object carrying raw registration-form input (including the
// plain-text password) between the web layer and UserService.
@Data
public class UserDTO {

    private Long id;

    // The @NotBlank constraints below only take effect when a controller
    // validates the bound object with @Valid — NOTE(review): the controllers
    // visible in this file do not; confirm whether validation is intended.
    @NotBlank
    private String username;

    @NotBlank
    private String firstname;

    @NotBlank
    private String lastname;

    @NotBlank
    private String email;

    @NotBlank
    private String password;

    // Role assigned to the user; NOTE(review): presumably populated by the
    // service layer rather than the form — confirm.
    private Role authority;

    public UserDTO() {
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/repositories/DonationRepository.java
package pl.coderslab.charity.model.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import pl.coderslab.charity.model.entities.Donation;
public interface DonationRepository extends JpaRepository<Donation, Long> {

    // Sums the quantity column over all donations. Returns null when the
    // table is empty (SQL/JPQL SUM over zero rows yields NULL, not 0).
    @Query("SELECT SUM(d.quantity) AS total FROM Donation d")
    Integer getTotalQuantity();
}
<file_sep>/src/main/java/pl/coderslab/charity/model/services/DonationService.java
package pl.coderslab.charity.model.services;
import org.modelmapper.ModelMapper;
import org.springframework.stereotype.Service;
import pl.coderslab.charity.model.DTO.DonationDTO;
import pl.coderslab.charity.model.entities.Donation;
import pl.coderslab.charity.model.repositories.DonationRepository;
import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.List;
@Service
@Transactional
public class DonationService {

    private final ModelMapper modelMapper;
    private final DonationRepository donationRepository;

    public DonationService(ModelMapper modelMapper, DonationRepository donationRepository) {
        this.modelMapper = modelMapper;
        this.donationRepository = donationRepository;
    }

    // NOTE: parameters/locals were previously named "DonationDTO" /
    // "DonationDtoList", shadowing the type name; renamed to follow Java
    // conventions. Parameter names are not part of the Java binary
    // interface, so callers are unaffected.

    /** Persists a new donation. */
    public void save(DonationDTO donationDTO) {
        donationRepository.save(toEntity(donationDTO));
    }

    /** Returns every donation mapped to a DTO. */
    public List<DonationDTO> getAll() {
        List<DonationDTO> donationDtoList = new ArrayList<>();
        for (Donation donation : donationRepository.findAll()) {
            donationDtoList.add(toDto(donation));
        }
        return donationDtoList;
    }

    /** Saves the given DTO; with a populated id this updates the existing row. */
    public void update(DonationDTO donationDTO) {
        donationRepository.save(toEntity(donationDTO));
    }

    /** Deletes the donation with the given id. */
    public void deleteById(Long id) {
        donationRepository.deleteById(id);
    }

    /** Total quantity across all donations; null when no donations exist (SUM over zero rows). */
    public Integer getTotalQuantity() {
        return donationRepository.getTotalQuantity();
    }

    // Entity <-> DTO mapping helpers.
    private DonationDTO toDto(Donation donation) {
        return modelMapper.map(donation, DonationDTO.class);
    }

    private Donation toEntity(DonationDTO donationDTO) {
        return modelMapper.map(donationDTO, Donation.class);
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/entities/User.java
package pl.coderslab.charity.model.entities;
import lombok.Getter;
import lombok.Setter;
import javax.persistence.*;
@Entity
@Table(name="users")
@Getter @Setter
public class User extends BaseEntity {

    @Column(nullable = false, unique = true)
    private String username;

    private String firstname;
    private String lastname;

    @Column(nullable = false, unique = true)
    private String email;

    // NOTE(review): presumably holds an encoded password set by the service
    // layer — confirm before logging or exposing this field.
    @Column(nullable = false)
    private String password;

    // Accounts start disabled; TokenService.findUserByTokenAndEnable flips
    // this to true once the e-mailed activation token is used.
    private Boolean enabled;

    @ManyToOne
    private Role authority;

    // New users are always created in the disabled state.
    public User() {
        super();
        this.enabled = false;
    }

    // Debug representation; deliberately omits the password.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("User{");
        sb.append("name='").append(username).append('\'');
        sb.append(", email='").append(email).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/services/CategoryService.java
package pl.coderslab.charity.model.services;
import org.modelmapper.ModelMapper;
import org.springframework.stereotype.Service;
import pl.coderslab.charity.model.entities.Category;
import pl.coderslab.charity.model.repositories.CategoryRepository;
import pl.coderslab.charity.model.DTO.CategoryDTO;
import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Service
@Transactional
public class CategoryService {

    private final ModelMapper modelMapper;
    private final CategoryRepository categoryRepository;

    public CategoryService(ModelMapper modelMapper, CategoryRepository categoryRepository) {
        this.modelMapper = modelMapper;
        this.categoryRepository = categoryRepository;
    }

    /** Persists a new category. */
    public void save(CategoryDTO categoryDTO) {
        Category category = toEntity(categoryDTO);
        categoryRepository.save(category);
    }

    /** Returns every category mapped to a DTO. */
    public List<CategoryDTO> getAll() {
        List<CategoryDTO> categoryDtoList = new ArrayList<>();
        for (Category category : categoryRepository.findAll()) {
            categoryDtoList.add(toDto(category));
        }
        return categoryDtoList;
    }

    /** Saves the given DTO; with a populated id this updates the existing row. */
    public void update(CategoryDTO categoryDTO) {
        categoryRepository.save(toEntity(categoryDTO));
    }

    /** Deletes the category with the given id. */
    public void deleteById(Long id) {
        categoryRepository.deleteById(id);
    }

    /**
     * Returns the category with the given id, or null when it does not exist.
     *
     * Fix: the previous implementation performed the repository lookup twice
     * (once into an unused Optional) and, for a missing id, passed null into
     * ModelMapper — which throws IllegalArgumentException instead of
     * returning the null the orElseGet clearly intended. Now a single lookup
     * is mapped only when present.
     */
    public CategoryDTO getById(Long categoryId) {
        return categoryRepository.findById(categoryId)
                .map(this::toDto)
                .orElse(null);
    }

    // Entity <-> DTO mapping helpers.
    private CategoryDTO toDto(Category category) {
        return modelMapper.map(category, CategoryDTO.class);
    }

    private Category toEntity(CategoryDTO categoryDTO) {
        return modelMapper.map(categoryDTO, Category.class);
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/controllers/DonationController.java
package pl.coderslab.charity.controllers;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import pl.coderslab.charity.model.services.CategoryService;
import pl.coderslab.charity.model.services.DonationService;
import pl.coderslab.charity.model.services.InstitutionService;
import pl.coderslab.charity.model.DTO.DonationDTO;
@Controller
@RequestMapping("/donation")
public class DonationController {

    // Made final for consistency with the service classes in this project:
    // all dependencies are mandatory and injected through the constructor.
    private final DonationService donationService;
    private final InstitutionService institutionService;
    private final CategoryService categoryService;

    public DonationController(DonationService donationService,
                              InstitutionService institutionService,
                              CategoryService categoryService) {
        this.donationService = donationService;
        this.institutionService = institutionService;
        this.categoryService = categoryService;
    }

    /** Shows the donation form with category/institution choices and an empty backing DTO. */
    @GetMapping
    public String prepareDonationFormPage(Model model) {
        model.addAttribute("categories", categoryService.getAll());
        model.addAttribute("institutions", institutionService.getAll());
        model.addAttribute(new DonationDTO());
        return "donationForm";
    }

    /** Persists the submitted donation and shows the confirmation page. */
    @PostMapping
    public String processDonationForm(DonationDTO donationDTO) {
        donationService.save(donationDTO);
        return "formConfirmation";
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/entities/Donation.java
package pl.coderslab.charity.model.entities;
import lombok.Getter;
import lombok.Setter;
import org.springframework.format.annotation.DateTimeFormat;
import javax.persistence.*;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.List;
@Entity
@Table(name = "donations")
@Getter @Setter
public class Donation extends BaseEntity {

    @Column(nullable = false)
    private Integer quantity;

    @ManyToMany
    private List<Category> categories = new ArrayList<>();

    @ManyToOne
    private Institution institution;

    private String street;

    // Fix: renamed from "City" to follow Java field-naming conventions.
    // Lombok still generates getCity()/setCity(), so callers and
    // ModelMapper-based DTO mapping are unaffected.
    // NOTE(review): with Spring Boot's default physical naming strategy the
    // mapped column is "city" either way — confirm no custom naming strategy
    // or existing schema depends on the old field name.
    private String city;

    private String zipCode;

    @DateTimeFormat(pattern = "dd-MM-yyyy")
    private LocalDate pickUpDate;

    private LocalTime pickUpTime;
    private String pickUpComment;

    // Debug representation; delegates the id/common fields to BaseEntity.
    @Override
    public String toString() {
        return "Donation{" +
                super.toString() +
                ", quantity=" + quantity +
                ", institution=" + institution +
                ", street='" + street + '\'' +
                ", city='" + city + '\'' +
                ", zipCode='" + zipCode + '\'' +
                ", pickUpDate=" + pickUpDate +
                ", pickUpTime=" + pickUpTime +
                ", pickUpComment='" + pickUpComment + '\'' +
                '}';
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/services/TokenService.java
package pl.coderslab.charity.model.services;
import org.modelmapper.ModelMapper;
import org.springframework.stereotype.Service;
import pl.coderslab.charity.model.DTO.TokenDTO;
import pl.coderslab.charity.model.entities.User;
import pl.coderslab.charity.model.entities.VerificationToken;
import pl.coderslab.charity.model.repositories.TokenRepository;
import pl.coderslab.charity.model.repositories.UserRepository;
import javax.transaction.Transactional;
import java.util.Optional;
@Service
@Transactional
public class TokenService {

    private final TokenRepository tokenRepository;
    private final UserRepository userRepository;
    private final ModelMapper modelMapper;

    public TokenService(TokenRepository tokenRepository, UserRepository userRepository, ModelMapper modelMapper) {
        this.tokenRepository = tokenRepository;
        this.userRepository = userRepository;
        this.modelMapper = modelMapper;
    }

    /** Stores a freshly generated activation token for the given user. */
    public void createToken(User user, String token) {
        VerificationToken verificationToken = new VerificationToken();
        verificationToken.setUser(user);
        verificationToken.setToken(token);
        tokenRepository.save(verificationToken);
    }

    /**
     * Looks the verification token up and enables the matching user account.
     *
     * Fix: the previous implementation dereferenced the repository result
     * without a null check, so an unknown/forged token caused a
     * NullPointerException; such tokens are now ignored.
     */
    public void findUserByTokenAndEnable(String token) {
        VerificationToken userToken = tokenRepository.findByToken(token);
        if (userToken == null) {
            return; // unknown token - nothing to activate
        }
        Optional<User> optionalUser = userRepository.findById(userToken.getUser().getId());
        optionalUser.ifPresent(user -> {
            user.setEnabled(true);
            userRepository.save(user);
        });
    }

    // Entity <-> DTO mapping helpers.
    // NOTE(review): currently unused within this service.
    private TokenDTO toDto(VerificationToken verificationToken) {
        return modelMapper.map(verificationToken, TokenDTO.class);
    }

    private VerificationToken toEntity(TokenDTO tokenDTO) {
        return modelMapper.map(tokenDTO, VerificationToken.class);
    }
}
<file_sep>/src/main/java/pl/coderslab/charity/model/repositories/RoleRepository.java
package pl.coderslab.charity.model.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import pl.coderslab.charity.model.entities.Role;
public interface RoleRepository extends JpaRepository<Role, Long> {

    // Spring Data derived query: finds the role whose authority column
    // matches the given name; returns null when no such role exists.
    Role findByAuthority(String authority);
}
<file_sep>/src/main/java/pl/coderslab/charity/controllers/RegisterPageController.java
package pl.coderslab.charity.controllers;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import pl.coderslab.charity.model.DTO.UserDTO;
import pl.coderslab.charity.model.services.TokenService;
import pl.coderslab.charity.model.services.UserService;
@Controller
public class RegisterPageController {

    private final UserService userService;
    private final TokenService tokenService;

    // Constructor injection keeps both dependencies mandatory and final.
    public RegisterPageController(UserService userService, TokenService tokenService) {
        this.userService = userService;
        this.tokenService = tokenService;
    }

    /** Shows the registration form backed by an empty UserDTO. */
    @GetMapping("/add")
    public String prepareRegisterPage(Model model) {
        model.addAttribute(new UserDTO());
        return "register";
    }

    /** Handles the submitted registration form and shows the "check your mail" page. */
    @PostMapping("/add")
    public String processRegisterPage(UserDTO userDTO) {
        // Drop any client-supplied id so the service always creates a new record.
        userDTO.setId(null);
        userService.saveUser(userDTO);
        return "confirmationRequest";
    }

    /** Activates the account tied to the e-mailed token, then redirects to login. */
    @RequestMapping("/activate")
    public String processActivationLink(@RequestParam String token) {
        tokenService.findUserByTokenAndEnable(token);
        return "redirect:/login";
    }
}
<file_sep>/src/test/java/pl/coderslab/charity/HomeControllerTest.java
package pl.coderslab.charity;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import pl.coderslab.charity.config.SecurityConfig;
import pl.coderslab.charity.controllers.HomeController;
import pl.coderslab.charity.model.DTO.InstitutionDTO;
import pl.coderslab.charity.model.services.DonationService;
import pl.coderslab.charity.model.services.InstitutionService;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
@RunWith(SpringRunner.class)
@WebMvcTest(HomeController.class)
public class HomeControllerTest {

    // Constant convention: made static final (was an instance field);
    // also removed the commented-out ModelMapper field and the useless
    // explicit no-arg constructor.
    private static final String HOME_VIEW = "index";

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private InstitutionService institutionService;

    @MockBean
    private DonationService donationService;

    /**
     * The home page must expose the institution list under the
     * "institutions" model attribute and render the index view.
     */
    @Test
    public void test_home_contains_institution_list() throws Exception {
        List<InstitutionDTO> institutions = Arrays.asList(new InstitutionDTO());
        when(this.institutionService.getAll()).thenReturn(institutions);

        this.mockMvc.perform(get("/"))
                .andExpect(model().attributeExists("institutions"))
                .andExpect(model().attribute("institutions", hasSize(1)))
                .andExpect(status().isOk())
                .andExpect(view().name(HOME_VIEW));
    }
}
| 50ea748d0e19d04fc4f4b1733cc1b245d8d386bb | [
"Java",
"INI"
] | 18 | Java | Joanna026/Charity-donation-web-app | e1a9bd94d81e8ae804ec7133ef6e273ae07e8387 | 14a73514f7990c94d4246992bf3bc5b1ee7e1daf |
refs/heads/master | <repo_name>santosh1817/fullStack-attainu-User-reg-login-NormalReact<file_sep>/client/src/components/city/City.js
import React from 'react'
const City=(props)=>{
return(
<div>
<div className="jumbotron">
<h1>List of cities!</h1>
</div>
</div>
)
}
export default City | 2dfc84b3902099662bbfadaee12ab288e148a426 | [
"JavaScript"
] | 1 | JavaScript | santosh1817/fullStack-attainu-User-reg-login-NormalReact | 73681f68af458c4324294beb46d5f75fb86e7c1f | 04024ae644fff4cdea79d6cf5602ec42ecd674ae |
refs/heads/master | <repo_name>WimJongeneel/CMTPRG6-vuejs<file_sep>/src/routes.js
export default {
'/': 'home',
'/new': 'new',
'/:id': 'details',
'/:id/edit': 'edit',
}
| 3445d01b64106f60fce80285706ddfe8ecbe4ebf | [
"JavaScript"
] | 1 | JavaScript | WimJongeneel/CMTPRG6-vuejs | fa0aeb6a213f8ad948887c3c397309fb8f5c6ac2 | 45ff299c347f7403eb08af57534075f3a43fcb23 |
refs/heads/master | <file_sep>#!/bin/bash
VMWARE_VERSION=workstation-14.1.7 #This needs to be the actual name of the appropriate branch in mkubecek's GitHub repo for your purposes
TMP_FOLDER=/tmp/patch-vmware
rm -fdr $TMP_FOLDER
mkdir -p $TMP_FOLDER
cd $TMP_FOLDER
git clone https://github.com/mkubecek/vmware-host-modules.git #Use `git branch -a` to find all available branches and find the one that's appropriate for you
cd $TMP_FOLDER/vmware-host-modules
git checkout $VMWARE_VERSION
git fetch
make
sudo make install
sudo rm /usr/lib/vmware/lib/libz.so.1/libz.so.1
sudo ln -s /lib/x86_64-linux-gnu/libz.so.1 /usr/lib/vmware/lib/libz.so.1/libz.so.1
systemctl restart vmware && vmplayer & | d561242a2432be3636039cb3d8fc3d91da230f10 | [
"Shell"
] | 1 | Shell | t3j0n/VMWare | a86b010744c64bbf241b880d2f8440fe923176b1 | 45829f98165fbcc2cd819730b2a1de1ed6e1ba73 |
refs/heads/master | <repo_name>AndroidMediaCodec/cv4faces<file_sep>/python code/binary image processing/erosion.py
import cv2
# read image
image = cv2.imread("../../images/truth.png", cv2.IMREAD_COLOR)
# get structuring kernel which used for erosion
erosionSize = 6
element = cv2.getStructuringElement(shape=cv2.MORPH_CROSS,
ksize=(2 * erosionSize + 1, 2 * erosionSize + 1),
anchor=(erosionSize, erosionSize))
# erode the image
erodedImage = cv2.erode(src=image, kernel=element)
# display image
cv2.imshow("Original Image", image)
cv2.imshow("Eroded Image", erodedImage)
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/binary image processing/binary image processing/erosion.cpp
#include <opencv2/opencv.hpp>
int main3() {
cv::Mat image = cv::imread("../../../../../images/truth.png", cv::IMREAD_COLOR);
int erosionSize = 6;
cv::Mat element = cv::getStructuringElement(cv::MORPH_CROSS,
cv::Size(2 * erosionSize + 1, 2 * erosionSize + 1),
cv::Point(erosionSize, erosionSize));
cv::Mat erodedImage;
cv::erode(image, erodedImage, element);
cv::imshow("Original Image", image);
cv::imshow("Eroded Image", erodedImage);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/highgui/trackbar.py
import cv2
thresholdValue = 200
thresholdType = 3
maxValue = 255
maxType = 4
maxBinaryValue = 255
windowName = "Threshold Demo"
trackbatType = "Type:\n 0: Binary\n 1: Binary Inverted\n 2: Truncate\n 3: To Zero\n 4: To Zero Inverted"
trackbarValue = "Value"
# call the function to initialize
# 0: Binary
# 1: Binary Inverted
# 2: Threshold Truncated
# 3: Threshold to zero
# 4: Threshold to inverted
# load image
gray = cv2.imread('../../images/threshold.png', cv2.IMREAD_GRAYSCALE)
# create a window to display results
cv2.namedWindow(windowName, cv2.WINDOW_AUTOSIZE)
def thresholdTypeDemo(*args):
global thresholdType
thresholdType = args[0]
thresh, result = cv2.threshold(gray, thresholdValue, maxBinaryValue, thresholdType)
cv2.imshow(windowName, result)
def thresholdValueDemo(*args):
global thresholdValue
thresholdValue = args[0]
thresh, result = cv2.threshold(gray, thresholdValue, maxBinaryValue, thresholdType)
cv2.imshow(windowName, result)
# create trackbar to choos type of threshold
cv2.createTrackbar(trackbatType,
windowName,
thresholdType,
maxType,
thresholdTypeDemo)
cv2.createTrackbar(trackbarValue,
windowName,
thresholdValue,
maxValue,
thresholdValueDemo)
thresholdTypeDemo(0)
thresholdValueDemo(0)
# wait until user finishes program
while True:
c = cv2.waitKey(20)
if c == 27:
break
<file_sep>/python code/basic functions/datatypeconversion.py
import cv2
import numpy as np
# read image
source = cv2.imread("../../images/lena.jpg", flags=1)
scalingFactor = 1 / 255.0
# convert unsigned int to float
source = np.float32(source)
source = source * scalingFactor
# convert back to unsigned int
source = source * (1.0 / scalingFactor)
source = np.uint8(source)<file_sep>/python code/highgui/highguikeyboard.py
import cv2
# opencv webcam
cap = cv2.VideoCapture(0)
k = 0
while True:
# read frame
ret, frame = cap.read()
# identify if 'ESC' is pressed
if k == 27:
break
# identify if 'e' or 'E' is pressed
if k == 101 or k == 69:
cv2.putText(frame,
'E is pressed, press Z or ESC',
(100, 180),
cv2.FONT_HERSHEY_SIMPLEX,
1,
(0, 255, 0),
3)
# identify if 'z' or 'Z' is pressed
if k == 90 or k == 122:
cv2.putText(frame,
'Z is pressed',
(100, 180),
cv2.FONT_HERSHEY_SIMPLEX,
1,
(0, 255, 0),
3)
cv2.imshow("Image", frame)
# increase waitkey to show display properly
k = cv2.waitKey(10000) & 0xFF
<file_sep>/c++ code/week1/examples/highgui/highgui/highguikeyboard.cpp
#include <opencv2/videoio/videoio.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <iostream>
#include <string>
int main1() {
// open webcam
cv::VideoCapture cap(0);
cv::Mat frame;
int k = 0;
// detect of webcam is working properly
if (!cap.isOpened()) {
std::cout << "Unable to detect webcam\n";
return EXIT_FAILURE;
}
while (1) {
// capture frame
cap >> frame;
if (k == 27) break;
// identify if 'e' or 'E' is pressed
if (k == 101 || k == 69) {
cv::putText(frame, "E is pressed, press Z or ESC", cv::Point(100, 180), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(0, 255, 0), 3);
}
if (k == 90 || k == 122) {
cv::putText(frame, "Z is pressed", cv::Point(100, 180), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(0, 255, 0), 3);
}
cv::imshow("Image", frame);
// waitKey is increased so the the display is shown
k = cv::waitKey(10000) & 0xFF;
}
return EXIT_SUCCESS;
}<file_sep>/python code/basic functions/warpaffine.py
import cv2
import numpy as np
# read image
source = cv2.imread("../../images/lena.jpg", flags=1)
# create 2 warp matrices / masks for different transformations
warpMat1 = np.float32([[1.2, 0.2, 2], [-0.3, 1.3, 1]])
warpMat2 = np.float32([[1.2, 0.3, 2], [0.2, 1.3, 1]])
# use warp affine
dim = source.shape
result1 = cv2.warpAffine(source, warpMat1, (int(1.5 * dim[0]), int(1.4 * dim[1])),
None, flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT_101)
result2 = cv2.warpAffine(source, warpMat2, (int(1.5 * dim[0]), int(1.4 * dim[1])),
None, flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT_101)
# display images
cv2.imshow("Original", source)
cv2.imshow("Result1", result1)
cv2.imshow("Result2", result2)
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/basic functions/basic functions/readwritedisplay.cpp
#include <opencv2/opencv.hpp>
int main1()
{
// read image
cv::Mat image = cv::imread("../../../../../images/lena.jpg", cv::IMREAD_ANYCOLOR);
// check for invalid input
if (image.empty()) {
std::cout << "Coutld not open or find image\n";
return EXIT_FAILURE;
}
// convert color image to gray
cv::Mat grayImage;
cv::cvtColor(image, grayImage, cv::COLOR_BGR2GRAY);
// create a window for display
cv::namedWindow("Image", CV_WINDOW_AUTOSIZE);
cv::namedWindow("Gray Image", CV_WINDOW_NORMAL);
// display image
cv::imshow("Image", image);
cv::imshow("Gray Image", grayImage);
// wait for a keystroke in the window
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/video processing/videowrite.py
import cv2
# create a VideoCapture object
cap = cv2.VideoCapture(0)
# check if camera opened successfully
if not cap.isOpened():
print("Unable to read camera feed")
# default resolution of frame are obtained.
# the default resolution are system dependent.
# convert the resolution from float to integer.
frame_width = int(cap.get(3))
frame_height = int(cap.get(4))
# define the codec and create VideoWrite object
# store in 'output.avi' file
out = cv2.VideoWriter('output.avi',
cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'),
10,
(frame_width, frame_height))
while True:
ret, frame = cap.read()
if ret:
# write the frame to 'output.avi'
out.write(frame)
# display the result
cv2.imshow('Frame', frame)
if cv2.waitKey(25) & 0xFF == 27:
break
# break the loop
else:
break
# when everything is done, release video capture object
cap.release()
out.release()
# close all the windows
cv2.destroyAllWindows()
<file_sep>/c++ code/week1/examples/basic functions/basic functions/rotate.cpp
#include <opencv2/opencv.hpp>
int main3() {
cv::Mat source, M, result;
// read image
source = cv::imread("../../../../../images/lena.jpg", cv::IMREAD_COLOR);
cv::Point2f center((source.cols / 2.0), (1.0 * source.rows / 2.0));
double rotationAngle = 30;
double scale = 1;
// getting the matrix which will define the rotation
M = cv::getRotationMatrix2D(center, rotationAngle, scale);
// rotate the source and store in result
cv::warpAffine(source, result, M, cv::Size(source.cols, source.rows));
// create windows to display
cv::namedWindow("Original Image", cv::WINDOW_AUTOSIZE);
cv::namedWindow("Rotated Image", cv::WINDOW_AUTOSIZE);
// display images
cv::imshow("Original Image", source);
cv::imshow("Rotated Image", result);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/basic functions/readwritedisplay.py
import cv2
image = cv2.imread("../../images/lena.jpg")
# check for invalid input
if image is None:
print("Could not open or find the image")
# convert color image to gray
grayImage = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# save result
cv2.imwrite("imageGray.jpg", grayImage)
# create a window for display
cv2.namedWindow("Image", cv2.WINDOW_AUTOSIZE)
cv2.namedWindow("Gray Image", cv2.WINDOW_NORMAL)
# display image
cv2.imshow("Image", image)
cv2.imshow("Gray Image", grayImage)
# wait for a keystroke in window
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/highgui/highgui/trackbar.cpp
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <string>
int thresholdValue = 150;
int thresholdType = 3;
int const maxValue = 255;
int const maxType = 4;
int const maxBinaryValue = 255;
cv::Mat img, gray, dst;
std::string windowName = "Threshold Demo";
std::string trackbarType = "Type:\n 0: Binary\n 1: Binary Inverted\n 2: Truncate\n 3: To zero\n 4: To Zero Inverted";
std::string trackbarValue = "Value";
void thresholdDemo(int, void*);
int main()
{
// load image
img = cv::imread("../../../../../images/threshold.png", cv::IMREAD_COLOR);
// convert image to gray
cv::cvtColor(img, gray, cv::COLOR_RGB2GRAY);
// create a window to display results
cv::namedWindow(windowName, CV_WINDOW_AUTOSIZE);
// create trackbar to choose type of threshold
cv::createTrackbar(trackbarType, windowName, &thresholdType, maxType, thresholdDemo);
cv::createTrackbar(trackbarValue, windowName, &thresholdValue, maxValue, thresholdDemo);
// call he function to initialize
thresholdDemo(0, 0);
// wait until user finishes program
while (true) {
int c;
c = cv::waitKey(20);
if (static_cast<char>(c) == 27) break;
}
return EXIT_SUCCESS;
}
void thresholdDemo(int, void*) {
/*
0: Binary
1: Binary Inverted
2: Threshold Truncated
3: Threshold to Zero
4: Threshold to Zero Inverted
*/
cv::threshold(gray, dst, thresholdValue, maxBinaryValue, thresholdType);
cv::imshow(windowName, dst);
}<file_sep>/python code/binary image processing/dilation.py
import cv2
# read image
image = cv2.imread("../../images/truth.png", cv2.IMREAD_COLOR)
# check for invalid input
if image is None:
print("Could not open or find image")
# get structuring kernel which used for in dilation
dilationSize = 6
element = cv2.getStructuringElement(shape=cv2.MORPH_CROSS,
ksize=(2 * dilationSize + 1, 2 * dilationSize + 1),
anchor=(dilationSize, dilationSize))
# dilating the image
dilatedImage = cv2.dilate(src=image, kernel=element)
# display images
cv2.imshow("Original Image", image)
cv2.imshow("Dilated Image", dilatedImage)
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/basic functions/basic functions/datatypeconversion.cpp
#include <opencv2/opencv.hpp>
int main()
{
cv::Mat source;
// scale will convert pixel values
double scale = 1 / 255.0;
double shift = 0;
// read image
source = cv::imread("../../../../../images/lena.jpg");
// convert from usigned char to float-32 bit
source.convertTo(source, CV_32FC3, scale, shift);
// convert from float to unsigned char
source.convertTo(source, CV_8UC3, 1.0 / scale, shift);
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/binary image processing/binary image processing/closing.cpp
#include <opencv2/opencv.hpp>
int main()
{
// read image
cv::Mat image = cv::imread("../../../../../images/closing.png", cv::IMREAD_GRAYSCALE);
// initialize number of iteration
int iterations = 1;
// create a structuring kernel
int closingSize = 10;
cv::Mat element = cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(2 * closingSize + 1, 2 * closingSize + 1),
cv::Point(closingSize, closingSize));
// apply morphological closing
cv::Mat morphClosedImage;
cv::morphologyEx(image, morphClosedImage, cv::MORPH_CLOSE, element, cv::Point(-1, -1), iterations);
// display images
cv::imshow("Original Image", image);
cv::imshow("Closing Image", morphClosedImage);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/binary image processing/binary image processing/thresholding.cpp
#include <opencv2/opencv.hpp>
int main1() {
// ead an image in grayscale
cv::Mat src = cv::imread("../../../../../images/threshold.png", cv::IMREAD_GRAYSCALE);
cv::Mat dst;
// set threshold and maximum vlaues
double thresh = 0;
double maxValue = 255;
// binary threshold
// val > thresh -> val = MaxValue
// val <= threh -> val = 0
cv::threshold(src, dst, thresh, maxValue, cv::THRESH_BINARY);
// display images
cv::imshow("Original Image", src);
cv::imshow("Threshold Image", dst);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/highgui/highguimouse.py
import cv2
import math
# lists to store points
center = []
circumference = []
def drawCircle(action, x, y, flags, userdata):
# referencing global variables
global center, circumference
# action to be taken when left mouse button is pressed
if action == cv2.EVENT_LBUTTONDOWN:
center = [(x, y)]
# mark the center
cv2.circle(source,
center[0],
1,
(0, 255, 0),
2,
cv2.LINE_AA)
# action to be taken when left mouse button is released
if action == cv2.EVENT_LBUTTONUP:
circumference = [(x, y)]
# calculate radius of circle
radius = math.sqrt(math.pow(center[0][0] - circumference[0][0], 2) +
math.pow(center[0][1] - circumference[0][1], 2))
# draw circle
cv2.circle(source,
center[0],
int(radius),
(0, 255, 0),
2,
cv2.LINE_AA)
cv2.imshow("Window", source)
source = cv2.imread("../../images/lena.jpg", 1)
# make dummy image, will be useful t clear the drawing
dummy = source.copy()
cv2.namedWindow("Window")
# high-gui function called when mouse event occur
cv2.setMouseCallback("Window", drawCircle)
k = 0
# loop until escape character is pressed
while k != 27:
cv2.imshow("Window", source)
cv2.putText(source,
'Choose center and drag, Press ESC to exit and c to clear',
(10, 30),
cv2.FONT_HERSHEY_SIMPLEX,
0.7,
(255, 255, 255),
2)
k = cv2.waitKey(20) & 0xFF
# another way to cloning
if k == 99:
source = dummy.copy()
cv2.destroyAllWindows()
<file_sep>/c++ code/week1/examples/basic functions/basic functions/cropandresize.cpp
#include <opencv2/opencv.hpp>
int main2()
{
cv::Mat source, scaleDown, scaleUp;
// read souce image
source = cv::imread("../../../../../images/lena.jpg");
// scaling factors
double scaleX = 0.6;
double scaleY = 0.6;
// scaling down the image 0.6 times
cv::resize(source, scaleDown, cv::Size(), scaleX, scaleY, cv::INTER_LINEAR);
// scaling up the image 1.8 times
cv::resize(source, scaleUp, cv::Size(), scaleX * 3, scaleY * 3, cv::INTER_LINEAR);
// cropped image
cv::Mat crop = source(cv::Range(50, 150), cv::Range(20, 200));
// create dsiplay windows for all three images
cv::namedWindow("Original", cv::WINDOW_AUTOSIZE);
cv::namedWindow("Scale Down", cv::WINDOW_AUTOSIZE);
cv::namedWindow("Scale Up", cv::WINDOW_AUTOSIZE);
cv::namedWindow("Croped Image", cv::WINDOW_AUTOSIZE);
// show image
cv::imshow("Original", source);
cv::imshow("Scale Down", scaleDown);
cv::imshow("Scale Up", scaleUp);
cv::imshow("Cropped Image", crop);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/binary image processing/opening.py
import cv2
# read image
image = cv2.imread("../../images/opening.png", cv2.IMREAD_GRAYSCALE)
# initialize number of iteration
iterations = 3
# get structuring kernel
openingSize = 3
element = cv2.getStructuringElement(shape=cv2.MORPH_ELLIPSE,
ksize=(2 * openingSize + 1, 2 * openingSize + 1),
anchor=(openingSize, openingSize))
# apply morphological opening
morphOpenedImage = cv2.morphologyEx(src=image,
op=cv2.MORPH_OPEN,
kernel=element,
iterations=3)
# display images
cv2.imshow("Original Image", image)
cv2.imshow("Opening Image", morphOpenedImage)
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/basic functions/basic functions/warpaffine.cpp
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <iostream>
#include <cmath>
#include <vector>
int main4()
{
// read image
cv::Mat source = cv::imread("../../../../../images/lena.jpg");
// create 2 warp matrices for diferent transformations
cv::Mat warpMat1 = (cv::Mat_<double>(2, 3) << 1.2, 0.2, 2, -0.3, 1.3, 1);
cv::Mat warpMat2 = (cv::Mat_<double>(2, 3) << 1.2, 0.3, 2, 0.2, 1.3, 1);
cv::Mat result1, result2;
// use warp affine
cv::warpAffine(source, result1, warpMat1, cv::Size(source.cols, source.rows), cv::INTER_LINEAR, cv::BORDER_REFLECT_101);
cv::warpAffine(source, result2, warpMat2, cv::Size(source.rows, source.cols), cv::INTER_LINEAR, cv::BORDER_REFLECT_101);
// display image
cv::imshow("Original", source);
cv::imshow("Result1", result1);
cv::imshow("Result2", result2);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/binary image processing/binary image processing/opening.cpp
#include <opencv2/opencv.hpp>
int main4()
{
// read image
cv::Mat image = cv::imread("../../../../../images/opening.png", cv::IMREAD_GRAYSCALE);
// initialize number of iterations
int iterations = 3;
// create struturing element
int openingSize = 3;
cv::Mat element = cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(2 * openingSize + 1, 2 * openingSize + 1),
cv::Point(openingSize, openingSize));
// apply morphological opening
cv::Mat morphOpenedImage;
cv::morphologyEx(image, morphOpenedImage, cv::MORPH_OPEN, element, cv::Point(-1, -1), iterations);
// display images
cv::imshow("Original Image", image);
cv::imshow("Opening Image", morphOpenedImage);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/basic functions/basic functions/getaffinetransform.cpp
#include <opencv2/opencv.hpp>
#include <iostream>
#include <cmath>
#include <vector>
int main5()
{
// input triangle
std::vector<cv::Point2f> tri1;
tri1.push_back(cv::Point2f(50, 50));
tri1.push_back(cv::Point2f(180, 140));
tri1.push_back(cv::Point2f(150, 200));
// output triangle
std::vector<cv::Point2f> tri2;
tri2.push_back(cv::Point2f(72, 51));
tri2.push_back(cv::Point2f(246, 129));
tri2.push_back(cv::Point2f(222, 216));
// another output triangle
std::vector<cv::Point2f> tri3;
tri3.push_back(cv::Point2f(77, 76));
tri3.push_back(cv::Point2f(260, 219));
tri3.push_back(cv::Point2f(242, 291));
// get the transformation matrices
cv::Mat warp1 = cv::getAffineTransform(tri1, tri2);
cv::Mat warp2 = cv::getAffineTransform(tri1, tri3);
// display the matrices
std::cout << warp1 << "\n\n\n" << warp2 << "\n";
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/binary image processing/binary image processing/dilation.cpp
#include <opencv2/opencv.hpp>
#include <iostream>
int main2() {
// read image in and store in cv::Mat image
cv::Mat image = cv::imread("../../../../../images/truth.png", cv::IMREAD_COLOR);
// create a structuring element
int dilationSize = 6;
cv::Mat element = cv::getStructuringElement(cv::MORPH_CROSS,
cv::Size(2 * dilationSize + 1, 2 * dilationSize + 1),
cv::Point(dilationSize, dilationSize));
// dilated image stored in cv::Mat
cv::Mat dilatedImage;
// dilation will increase brightness
cv::dilate(image, dilatedImage, element);
// display images
cv::imshow("Original Image", image);
cv::imshow("Dilated Image", dilatedImage);
cv::waitKey(0);
return EXIT_SUCCESS;
}<file_sep>/python code/basic functions/getaffine.py
import cv2
import numpy as np
# input triangle
inp = np.float32([[50, 50], [180, 140], [150, 200]])
# output triangle
output1 = np.float32([[72, 51], [246, 129], [222, 216]])
# another output
output2 = np.float32([[77, 76], [260, 219], [242, 291]])
# get the transformation matrices
warpMat1 = cv2.getAffineTransform(inp, output1)
warpMat2 = cv2.getAffineTransform(inp, output2)
# display the matrices
print(warpMat1, '\n\n', warpMat2)<file_sep>/python code/basic functions/cropandresize.py
import cv2
# Demo of cv2.resize (scale down / scale up) and NumPy-slice cropping.
source = cv2.imread("../../images/lena.jpg", flags=1)
scaleX = 0.6
scaleY = 0.6
# scaling down the image 0.6 times
scaleDown = cv2.resize(source, None, fx=scaleX, fy=scaleY, interpolation=cv2.INTER_LINEAR)
# scaling up the image 1.8 times
# BUG FIX: fx previously used scaleY; it only happened to work because
# scaleX == scaleY here
scaleUp = cv2.resize(source, None, fx=scaleX * 3, fy=scaleY * 3, interpolation=cv2.INTER_LINEAR)
# crop rows 50:150 and columns 20:200 (a NumPy view, not a copy)
crop = source[50:150, 20:200]
# display all the images
cv2.imshow("Original", source)
cv2.imshow("Scaled Down", scaleDown)
cv2.imshow("Scaled Up", scaleUp)
cv2.imshow("Cropped Image", crop)
cv2.waitKey(0)
<file_sep>/python code/binary image processing/thresholding.py
import cv2
# Binary thresholding demo.
# read an image in grayscale
src = cv2.imread("../../images/threshold.png", cv2.IMREAD_GRAYSCALE)
# set threshold and maximum value
# NOTE(review): thresh=0 with THRESH_BINARY maps every non-zero pixel to
# maxValue (255) -- presumably intentional for this test image; confirm.
thresh = 0
maxValue = 255
# binary threshold; `th` is the threshold actually used, `dst` the result
th, dst = cv2.threshold(src=src, thresh=thresh, maxval=maxValue, type=cv2.THRESH_BINARY)
# display images
cv2.imshow("Original Image", src)
cv2.imshow("Thresholded Image", dst)
cv2.waitKey(0)
<file_sep>/python code/binary image processing/closing.py
import cv2

# Morphological closing demo (dilate-then-erode): fills small dark holes.
src = cv2.imread("../../images/closing.png", cv2.IMREAD_GRAYSCALE)

# the closing is applied once
num_passes = 1

# elliptical kernel of size 2*radius+1, anchored at its centre
radius = 10
kernel = cv2.getStructuringElement(shape=cv2.MORPH_ELLIPSE,
                                   ksize=(2 * radius + 1, 2 * radius + 1),
                                   anchor=(radius, radius))

# run the closing
closed = cv2.morphologyEx(src=src, op=cv2.MORPH_CLOSE, kernel=kernel, iterations=num_passes)

# show input and output
cv2.imshow("Original Image", src)
cv2.imshow("Closing Image", closed)
cv2.waitKey(0)
<file_sep>/python code/basic functions/rotate.py
import cv2
# Rotate an image about its centre with getRotationMatrix2D + warpAffine.
# read image
source = cv2.imread("../../images/lena.jpg", flags=1)
# get the dimensions of the image
dim = source.shape
rotationAngle = 180
scaleFactor = 1
# rotate the image by `rotationAngle` (180) degrees about the center
# (the old comment said 90 degrees, which did not match the code)
# dim[0] stores the number of rows and dim[1] stores the number of columns
rotationMatrix = cv2.getRotationMatrix2D((dim[1] / 2, dim[0] / 2), rotationAngle, scaleFactor)
result = cv2.warpAffine(source, rotationMatrix, (dim[1], dim[0]))
# show images
cv2.imshow("Original", source)
cv2.imshow("Rotated Image", result)
cv2.waitKey(0)
<file_sep>/c++ code/week1/examples/video manipulating/video manipulating/videoread.cpp
#include <opencv2/opencv.hpp>
// Play a video file frame-by-frame until it ends or ESC is pressed.
int main1() {
	// create a VideoCapture for the input file
	// (pass 0 instead of a file name to read from the default webcam)
	cv::VideoCapture cap("../../../../../videos/chaplin.mp4");
	// bail out if the stream could not be opened
	if (!cap.isOpened()) {
		std::cout << "Error opening video stream or file\n";
		return EXIT_FAILURE;
	}
	for (;;) {
		// grab the next frame; an empty frame means end of stream
		cv::Mat frame;
		cap >> frame;
		if (frame.empty()) break;
		// show it
		cv::imshow("Frame", frame);
		// ~25 ms per frame; ESC (27) quits early
		char key = (char)cv::waitKey(25);
		if (key == 27) break;
	}
	// release the capture and tear down the windows
	cap.release();
	cv::destroyAllWindows();
	return EXIT_SUCCESS;
}<file_sep>/python code/video processing/videoread.py
import cv2

# Play a video file frame-by-frame until it ends or ESC is pressed.
# (pass 0 to VideoCapture to read from the default webcam instead)
cap = cv2.VideoCapture('../../videos/chaplin.mp4')

# warn if the stream could not be opened
if not cap.isOpened():
    print("Error opening video stream or file")

while cap.isOpened():
    # grab the next frame; ret is False at end of stream
    ret, frame = cap.read()
    if not ret:
        break
    # show it
    cv2.imshow('Frame', frame)
    # ~25 ms per frame; ESC (27) quits early
    if cv2.waitKey(25) & 0xFF == 27:
        break

# when everything is done, release the capture and close the windows
cap.release()
cv2.destroyAllWindows()
<file_sep>/c++ code/week1/examples/highgui/highgui/highguimouse.cpp
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <iostream>
#include <vector>
#include <cmath>
// points to store the center of the circle and a point on the circumference
cv::Point center, circumference;
// source image
cv::Mat source;
// function which will be called on mouse input
// Mouse callback: press LMB to pick the centre, release LMB to pick a point
// on the circumference; the resulting circle is drawn onto `source`.
void drawCircle(int action, int x, int y, int flags, void *userdata) {
	// left button pressed: remember and mark the centre
	if (action == cv::EVENT_LBUTTONDOWN) {
		center = cv::Point(x, y);
		cv::circle(source, center, 1, cv::Scalar(255, 255, 0), 2, CV_AA);
	}
	// left button released: compute the radius and draw the circle
	else if (action == cv::EVENT_LBUTTONUP) {
		circumference = cv::Point(x, y);
		// Euclidean distance centre -> release point
		float radius = std::sqrt(std::pow(center.x - circumference.x, 2) + std::pow(center.y - circumference.y, 2));
		cv::circle(source, center, radius, cv::Scalar(0, 255, 0), 2, CV_AA);
		// BUG FIX: refresh the window main2() actually created ("Window");
		// the old name ("Image") opened a stray second window instead.
		cv::imshow("Window", source);
	}
}
int main2() {
source = cv::imread("../../../../../images/lena.jpg", 1);
// make a dummy image, will be useful to clear the drawing
cv::Mat dummy = source.clone();
cv::namedWindow("Window");
// highgui function called when mouse events occur
cv::setMouseCallback("Window", drawCircle);
int k = 0;
// loop until escape character is pressed
while (k != 27) {
cv::imshow("Window", source);
cv::putText(source, "Choose center, and drag, Press ESC to exit and c to clear", cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 0.7, cv::Scalar(255, 255, 255), 2);
k = cv::waitKey(20) & 0xFF;
if (k == 99) dummy.copyTo(source);
}
return EXIT_SUCCESS;
}<file_sep>/c++ code/week1/examples/video manipulating/video manipulating/videowrite.cpp
#include <opencv2/opencv.hpp>
// Record the default webcam to 'output.avi' (MJPG, 10 fps) until ESC.
int main() {
	// open the default camera
	cv::VideoCapture cap(0);
	// bail out if the camera could not be opened
	if (!cap.isOpened()) {
		std::cout << "Error opening video stream\n";
		return EXIT_FAILURE;
	}
	// the camera reports its own (system-dependent) default resolution
	int frame_width = cap.get(CV_CAP_PROP_FRAME_WIDTH);
	int frame_height = cap.get(CV_CAP_PROP_FRAME_HEIGHT);
	// MJPG-encoded writer targeting 'output.avi'
	cv::VideoWriter video("output.avi", CV_FOURCC('M', 'J', 'P', 'G'), 10, cv::Size(frame_width, frame_height));
	for (;;) {
		// grab a frame; an empty frame means the stream ended
		cv::Mat frame;
		cap >> frame;
		if (frame.empty()) break;
		// persist and preview it
		video.write(frame);
		cv::imshow("Frame", frame);
		// ESC (27) stops the recording
		char key = (char)cv::waitKey(1);
		if (key == 27) break;
	}
	// release capture and writer, then close the preview windows
	cap.release();
	video.release();
	cv::destroyAllWindows();
	return EXIT_SUCCESS;
} | 3f58fff76162db4026e22f3587a2f2425dd43f27 | [
"Python",
"C++"
] | 32 | Python | AndroidMediaCodec/cv4faces | 17424715206454754dfdc343b54d4682d01abe9f | 30e61a59a5aae6ccd6598b8163717493405b06c6 |
refs/heads/master | <file_sep>(function() {
chrome.storage.local.get('grayscale', function(result){
grayscale = result.grayscale || false;
chrome.browserAction.onClicked.addListener(function () {
grayscale = !grayscale;
chrome.storage.local.set({'grayscale':grayscale});
if(grayscale){
chrome.browserAction.setIcon({
path: "off.png"
});
chrome.browserAction.setTitle({
title: "คลิกเพื่อเข้าโหมดภาพสี"
});
chrome.tabs.executeScript(null, {
code: 'css.innerHTML = "body { filter: grayscale(1) !important; }";'
});
}else{
chrome.browserAction.setIcon({
path: "on.png"
});
chrome.browserAction.setTitle({
title: "คลิกเพื่อเข้าโหมดขาวดำ"
});
chrome.tabs.executeScript(null, {
code: 'css.innerHTML = "html *,* { filter: initial !important; }";'
});
}
});
});
})(); | efd10980df46c9c1e0cad8c0aa13a02b27991176 | [
"JavaScript"
] | 1 | JavaScript | earthchie/colorization_chrome | 9f66dff4e1402a3c7f28d226b37a32bf3cc3d903 | ddd518f8c1f30846ebabaf254df5b9a8c2da84dd |
refs/heads/master | <repo_name>santana69/cse170-group2<file_sep>/README.md
CSE170/COGS120 Project
Studio: Resolutions @ 11:30am
Team 2:
<NAME>
<NAME>
<NAME> A99420007
<file_sep>/public/js/index_history_bottom.js
'use strict';
// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
	initializePage();
})
/*
 * Function that is called when the document is ready.
 * Wires up the "history on bottom" home-page variant: the per-row amount
 * choice buttons, the Save buttons (instrumented with a Google Analytics
 * timing event), cause-card navigation, and the two-step donation modals.
 */
function initializePage() {
	//time when page was loaded (ms since epoch); used for the GA timing below
	var loadDate = new Date().getTime();
	//To prevent buttons from staying focused after press
	$(".btn").mouseup(function(){
		$(this).blur();
	});
	/*
	 * Click listeners for money choices.
	 * The two choice buttons in a row are mutually exclusive; the row's
	 * Save button is enabled only while one of them is active.
	 */
	$(".btn-choice-1").click(function(e) {
		//check if active
		if ($(this).hasClass("active")) {
			//active, unset active
			$(this).toggleClass("active");
			//set save button disabled
			$(this).closest('.row').find('.btn-save').attr('disabled', true);
		}
		else {
			//not active, set active
			$(this).toggleClass("active");
			//remove active from other button (if applicable)
			$(this).closest('.row').find('.btn-choice-2').removeClass('active');
			//set save button enabled
			$(this).closest('.row').find('.btn-save').attr('disabled', false);
		}
	});
	/*
	 * Click listeners for money choices (mirror of .btn-choice-1 above)
	 */
	$(".btn-choice-2").click(function(e) {
		//check if active
		if ($(this).hasClass("active")) {
			//active, unset active
			$(this).toggleClass("active");
			//set save button disabled
			$(this).closest('.row').find('.btn-save').attr('disabled', true);
		}
		else {
			//not active, set active
			$(this).toggleClass("active");
			//remove active from other button (if applicable)
			$(this).closest('.row').find('.btn-choice-1').removeClass('active');
			//set save button enabled
			$(this).closest('.row').find('.btn-save').attr('disabled', false);
		}
	});
	// clicking a cause card opens its detail page; card ids look like "causeN"
	$(".my-causes-body").click(function(e) {
		window.open("/my_cause_detail/" + $(this).attr('id').substr('cause'.length), "_self");
	});
	$('.btn-save').click(function(e) {
		//calculate time elapsed since page load
		var now = new Date().getTime();
		var timeElapsed = now - loadDate;
		//send time event to GA (A/B metric tagged with this layout variant)
		ga('send', 'timing', 'save', 'homeHistoryOnBottom', timeElapsed);
		// cause index comes from the sibling card's "causeN" id; the amount
		// from whichever choice button is active in the same row
		var id_cause = $(this).closest('.my-panel').find('.my-causes-body').attr('id').substr('cause'.length);
		var amountToAdd = $(this).closest('.row').find('.btn-choice.active').attr('value');
		window.open("/addMoneyToCause/index/"+id_cause+"/"+amountToAdd, "_self");
	});
	// step 1 of donating: remember the cause on the confirm modal's button
	$('.donate-button').click(function(e) {
		var id_cause = $(this).closest('.my-panel').find('.my-causes-body').attr('id').substr('cause'.length);
		$('#donationConfirmModal').find('#donateButton').attr('value', id_cause);
		$('#donationConfirmModal').modal('show');
	});
	// step 2: swap the confirm modal for the final donation modal/link
	$('#donateButton').click(function(e) {
		var id_cause = $(this).attr('value');
		$('#donationModal').find('#donateAnchor').attr('href', "/donateToCause/" + id_cause);
		$('#donationConfirmModal').modal('hide');
		$('#donationModal').modal('show');
	});
}<file_sep>/routes/charities.js
//var fullData = require('./index').fullData;
//var achievements = require('./achievements').achievements;
//require DB model
var models = require('../models');
exports.view = function(req, res) {
//models.User.findByIdAndUpdate(req.user._id, {$set: {"my_causes" : []}}, function(err,affected){});
//Load Charities
// models.Charity
// .find({
// "enabled" : true
// })
// //.lean() //Causes query to get plain JSON object (modifiable)
// .exec(function(err, charities) {
// if (err) {
// console.log("Charities: error on loadCharities = ", err);
// res.send(500);
// }
// else {
// //Find favorite and my_cause statuses of charities
// var fullCharities = [];
// var saved_causes = [];
// for (var i=0; i<charities.length; ++i) {
// var charity = {
// "charity" : charities[i]
// }
// //Since req.user.my_causes contains populated charity objects, we map a new array with only the ids
// var charIds = req.user.my_causes.map(function(cause) { return cause.charity._id.toString(); });
// //Check if charity id in charIds
// if ( charIds.indexOf(charity.charity._id.toString()) != -1) {
// //Found charity id in user's my_causes, set as my_cause
// charity['my_cause'] = "1";
// }
// else {
// charity['my_cause'] = "";
// }
// //Check if charity id in user favorites
// if ( req.user.favorites.indexOf(charity.charity._id) != -1) {
// //Found charity id in user's favorites, set as favorite
// charity['favorite'] = "1";
// //Since we have it in favorites, we add it to saved_causes
// saved_causes.push(charity);
// }
// else {
// charity['favorite'] = "";
// }
// // if (charity.charity.name == "Charity 8") {
// // //Add my_cause
// // var my_cause = {
// // "charity" : charity.charity._id,
// // "percentage" : 25,
// // "money_saved" : 100
// // };
// // models.User.findByIdAndUpdate(
// // req.user._id,
// // {$push: {"my_causes" : my_cause}},
// // {safe : true},
// // function(err, model) {console.log(err);}
// // );
// // }
// fullCharities.push(charity);
// }
// console.log("CHARITIES: ", fullCharities);
// console.log("SAVEDCAUSES: ", saved_causes);
var fullData = req.fullData;
var empty = false;
if (fullData['saved_causes'].length == 0) {
empty = true;
}
res.render('charities', {
"page_charities" : 1,
"empty" : empty
});
//console.log(fullData);
// }
// });
}
exports.addMyCause = function(req, res) {
//res.render('empty');
var fullData = req.fullData;
var source = req.query.source;
if (fullData['my_causes'][3].hasOwnProperty('charity')) {
//Already full, we cant add more
res.json({"result" : "full"});
}
else {
// //we're adding a cause, so we set achievement
// achievements[1].completed = true;
if (source == "saved_causes") {
var charity = fullData['saved_causes'][req.query.index_charity];
// //set charity to be one my_cause
// charity['my_cause'] = "1";
// //create new json object for new myCause
// var myCause = {
// "charity" : charity['charity'],
// "percentage" : "0",
// "money_saved" : "0.00",
// "id_saving_amount": "1",
// "saving_amount" : "15c",
// "finished" : ""
// };
var my_cause = {
"charity" : charity.charity._id
};
models.User.findByIdAndUpdate(
req.user._id,
{$push: {"my_causes" : my_cause},
$addToSet : {"achievements" : 1}}, //we're adding a cause, so we set achievement
{safe : true, new : true},
function(err, result) {
if (err) {
console.log(err);
}
console.log("result " + result);
if (result.my_causes.length == 4) {
//set achievement
models.User.findByIdAndUpdate(
req.user._id,
{$addToSet : {"achievements" : 4}},
{safe : true},
function(err, result) {
if (err) {
console.log(err);
}
}
) //achievements[4].completed = true;
}
}
);
// //find which index to add new cause to
// var index = 3;
// for (var i=0; i < fullData['my_causes'].length; ++i) {
// if (!fullData['my_causes'][i].hasOwnProperty('charity')) {
// index = i;
// break;
// }
// }
// fullData['my_causes'][index] = myCause;
// console.log(fullData);
// //check if index == 3. Means we have 4 causes at a time
// if (index == 3) {
// //set achievement
// achievements[4].completed = true;
// }
}
else if (source == "charities") {
var charity = fullData['charities'][req.query.index_charity];
//set charity to be one my_cause
// charity['my_cause'] = "1";
// //create new json object for new myCause
// var myCause = {
// "charity" : charity['charity'],
// "percentage" : "0",
// "money_saved" : "0.00",
// "id_saving_amount": "1",
// "saving_amount" : "15c",
// "finished" : ""
// };
var my_cause = {
"charity" : charity.charity._id
};
models.User.findByIdAndUpdate(
req.user._id,
{$push: {"my_causes" : my_cause},
$addToSet : {"achievements" : 1}}, //we're adding a cause, so we set achievement
{safe : true, new : true},
function(err, result) {
if (err) {
console.log(err);
}
console.log("result " + result);
if (result.my_causes.length == 4) {
//set achievement
models.User.findByIdAndUpdate(
req.user._id,
{$addToSet : {"achievements" : 4}},
{safe : true},
function(err, result) {
if (err) {
console.log(err);
}
}
) //achievements[4].completed = true;
}
}
);
// //find which index to add new cause to
// var index = 3;
// for (var i=0; i < fullData['my_causes'].length; ++i) {
// if (!fullData['my_causes'][i].hasOwnProperty('charity')) {
// index = i;
// break;
// }
// }
// fullData['my_causes'][index] = myCause;
// console.log(fullData);
// //check if index == 3. Means we have 4 causes at a time
// if (index == 3) {
// //set achievement
// achievements[4].completed = true;
// }
}
res.json({"result": "success"});
}
}<file_sep>/public/js/saved_causes.js
'use strict';
// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
	initializePage();
})
/*
 * Function that is called when the document is ready.
 * Wires up the saved-causes page: card clicks open the detail view and the
 * heart buttons toggle a charity's favorite status server-side.
 */
function initializePage() {
	//To prevent buttons from staying focused after press
	$(".btn").mouseup(function(){
		$(this).blur();
	});
	//To open detail of selected charity
	//NOTE(review): unlike charities.js this passes no source/index, so every
	//card opens the same /cause_detail URL -- confirm this is intended.
	$(".causes-body").click(function(e) {
		window.open("/cause_detail", "_self");
	});
	// heart button: ids look like "charityN"; N indexes the saved_causes list
	$('.favorite-button').click(function(e) {
		var id = $(this).closest('.charity').attr('id').substr('charity'.length);
		console.log(id);
		if ($(this).hasClass('favorite-button-active')) {
			// currently a favorite -> remove, then reload to refresh the list
			$.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"remove", "index_charity": id}).done(function(data) {
				location.reload();
			});
		}
		else {
			// not yet a favorite -> add, then reload
			$.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"add", "index_charity": id}).done(function(data) {
				location.reload();
			});
		}
	})
}<file_sep>/public/js/settings.js
'use strict';
// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
initializePage();
})
/*
* Function that is called when the document is ready.
*/
function initializePage() {
/*
* Click listeners for Notifications On/Off Segments
*/
$("#btn_group_notification_on").click(function(e) {
//money tab selected, check if active
if (! $(this).hasClass("active")) {
//not active, set active
$(this).toggleClass("active");
//unset active from other btn
$("#btn_group_notification_off").toggleClass("active");
}
});
$("#btn_group_notification_off").click(function(e) {
//days tab selected, check if active
if (! $(this).hasClass("active")) {
//not active, set active
$(this).toggleClass("active");
//unset active from other btn
$("#btn_group_notification_on").toggleClass("active");
}
});
}<file_sep>/routes/saved_causes.js
//var fullData = require('./index').fullData;
exports.view = function(req, res) {
var fullData = req.fullData;
res.render('saved_causes', {
"page_saved_causes" : 1
});
/*
var newVar = {
"name": "Josh",
"lastname": "<NAME>"
}
my_causes["my_causes"].push(newVar);
*/
console.log(fullData);
}<file_sep>/public/js/history.js
'use strict';
// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
	initializePage();
})
/*
 * Function that is called when the document is ready.
 * Wires up the history page: clicking a cause card opens its history detail.
 */
function initializePage() {
	//To prevent buttons from staying focused after press
	$(".btn").mouseup(function(){
		$(this).blur();
	});
	// card ids look like "causeN"; strip the prefix to get the cause index
	$(".my-causes-body").click(function(e) {
		window.open("/history_detail/" + $(this).attr('id').substr('cause'.length), "_self");
	});
}<file_sep>/public/js/charities.js
'use strict';
// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
	initializePage();
})
/*
 * Function that is called when the document is ready.
 * Wires up the charities browser: the All/Favorites tab pair (persisted to
 * the server session), card navigation, add-to-my-causes buttons, and the
 * favorite toggles.
 * NOTE(review): `session` and `hasBankAccount` are globals presumably
 * injected by the page template -- not defined in this file; confirm.
 */
function initializePage() {
	/*
	 * Click listeners for Auto/Manual Segments
	 * (the All / Favorites tab pair; the inactive tab's row is hidden)
	 */
	$("#btn_group_all").click(function(e) {
		//all tab selected, check if active
		if (! $(this).hasClass("active")) {
			//not active, set active
			$(this).toggleClass("active");
			//unset active from other btn
			$("#btn_group_favorites").toggleClass("active");
			//hide other
			$('#row-favorites').hide();
			//show this
			$('#row-all-charities').show();
			//set session to remember selected tab
			$.get('/session/update_session', {"sessionKey" : "showMyFavorites", "sessionValue": ""});
			//also update local session variable
			session.showMyFavorites = "";
		}
	});
	// mirror of the handler above, for the Favorites tab
	$("#btn_group_favorites").click(function(e) {
		//favorites tab selected, check if active
		if (! $(this).hasClass("active")) {
			//not active, set active
			$(this).toggleClass("active");
			//unset active from other btn
			$("#btn_group_all").toggleClass("active");
			//hide other
			$('#row-all-charities').hide();
			//show this
			$('#row-favorites').show();
			//set session to remember selected tab
			$.get('/session/update_session', {"sessionKey" : "showMyFavorites", "sessionValue": "favorites"});
			//also update local session variable
			session.showMyFavorites = "favorites";
		}
	});
	//To prevent buttons from staying focused after press
	$(".btn").mouseup(function(){
		$(this).blur();
	});
	//To open detail of selected charity (All tab; card ids look like "causeN")
	$(".causes-body").click(function(e) {
		window.open("/cause_detail/charities/" + $(this).attr('id').substr('cause'.length), "_self");
	});
	//To open detail of selected charity (Favorites tab)
	$(".causes-body-favorites").click(function(e) {
		window.open("/cause_detail/saved_causes/" + $(this).attr('id').substr('cause'.length), "_self");
	});
	// add a charity to my causes (All tab); only allowed with a bank account.
	// The server answers {"result":"full"} when all four slots are taken.
	$('.add-button').click(function(e) {
		if (hasBankAccount) {
			var id = $(this).closest('.charity').attr('id').substr('charity'.length);
			console.log(id);
			$.get("/charities/add_my_cause", {'source':'charities', 'index_charity': id}).done(function(data){
				if (data['result'] == "full") {
					$('#addMyCauseFull').modal("show");
				}
				else {
					location.reload();
				}
			});
		}
	});
	// same as above, but the index refers to the saved_causes list
	$('.add-button-favorites').click(function(e) {
		if (hasBankAccount) {
			var id = $(this).closest('.charity').attr('id').substr('charity'.length);
			console.log(id);
			$.get("/charities/add_my_cause", {'source':'saved_causes', 'index_charity': id}).done(function(data){
				if (data['result'] == "full") {
					$('#addMyCauseFull').modal("show");
				}
				else {
					location.reload();
				}
			});
		}
	});
	// heart button: the source/index sent depends on which tab is showing
	$('.favorite-button').click(function(e) {
		var id = $(this).closest('.charity').attr('id').substr('charity'.length);
		console.log(id);
		if(session.showMyFavorites == "favorites") {
			if ($(this).hasClass('favorite-button-active')) {
				$.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"remove", "index_charity": id}).done(function(data) {
					location.reload();
				});
			}
			else {
				$.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"add", "index_charity": id}).done(function(data) {
					location.reload();
				});
			}
		}
		else {
			if ($(this).hasClass('favorite-button-active')) {
				$.get('/charities/toggle_favorite', {"source": "charities", "action":"remove", "index_charity": id}).done(function(data) {
					location.reload();
				});
			}
			else {
				$.get('/charities/toggle_favorite', {"source": "charities", "action":"add", "index_charity": id}).done(function(data) {
					location.reload();
				});
			}
		}
	})
}<file_sep>/routes/createacct.js
// var fullData = require('./index').fullData;
// var user = require('./index').user;
//require DB model
var models = require('../models');
// var achievements = require('./achievements').achievements;
exports.view = function(req, res) {
res.render('createacct');
};
exports.attemptSignUp = function(req, res) {
//set user
// user.id = "2";
// user.firstname = req.body.firstname;
// user.lastname = req.body.lastname;
// user.name = user.firstname + " " + user.lastname;
// user.email = req.body.email;
// user.password = <PASSWORD>;
// user.bank_account = "";
// user.balance = "";
//clear fulldata
// fullData.my_causes = [{},{},{},{}];
// fullData.saved_causes = [];
// fullData.history = [];
// for (var i=0; i<fullData.charities.length; ++i) {
// var currCause = fullData.charities[i];
// currCause.my_cause = "";
// currCause.favorite = "";
// }
// //clear achievements (except first one)
// for (var i=1; i<achievements.length; ++i) {
// achievements[i].completed = false;
// }
//clear session
delete req.session.user;
delete req.user;
delete req.fullData;
// console.log(fullData);
// console.log(user);
// console.log(achievements);
//Check if user exists
models.User
.findOne({
"email" : req.body.email,
"enabled" : true
})
.exec(function(err, user) {
if (user) {
//Send success
res.json({"error" : true, "message" : "User already exists."});
}
else {
//User doesn't exist, create one
var new_user = new models.User({
"firstname" : req.body.firstname,
"lastname" : req.body.lastname,
"email" : req.body.email,
"password" : <PASSWORD>,
"achievements" : [0] //set first achievement as done
});
new_user.save(function(err, user) {
if (err) {
console.log("SingUp: error on create new user = " + err);
//Send success
res.json({"error" : true, "message" : "Error creating user."});
}
else {
//refresh session value with user data
req.session.user = user;
res.json({"error" : false, "message" : "Success"});
}
});
}
});
//res.redirect('/');
};
exports.checkEmailExists = function(req, res) {
console.log("email: "+req.body.email);
//Check if email already exists for bootstrapValidator
models.User
.findOne({
"email" : req.body.email,
"enabled" : true
})
.exec(function(err, user) {
if (user || err) {
//Send success
res.json({"valid" : false});
}
else {
//Send success
res.json({"valid" : true});
}
});
};<file_sep>/routes/login.js
//require DB model
var models = require('../models');
//var user = require('./index').user;
exports.view = function(req, res) {
//If we load login, we destroy session
req.session.destroy();
delete req.user;
delete req.fullData;
res.render('login');
}
// POST /login -- validate credentials against the DB and, on success, store
// the user document on the session (this is what marks the requester as
// logged in for later middleware). Responds JSON consumed by the login form.
// NOTE(review): `<PASSWORD>` is a redaction artifact in this dump; the
// original presumably read req.body.password -- confirm before use.
exports.attemptLogin = function(req, res) {
	var email = req.body.email;
	var password = <PASSWORD>;
	//Lookup user in DB (only enabled accounts may log in)
	models.User
		.find({
			"email" : email,
			"password" : <PASSWORD>,
			"enabled" : true
		})
		.limit(1)
		.exec(function(err, user) {
			if (user && user.length > 0) {
				//console.log("wut" + user[0]);
				//refresh session value with user data
				req.session.user = user[0];
				//console.log("WHY: ", req.session.user);
				//Send success
				res.json({"error" : false, "message" : "Success"});
			}
			else {
				res.json({"error" : true, "message" : "User not found"});
			}
		});
	/*
	if (user.email == email && user.password == <PASSWORD>) {
		//found user
		res.json({"error":false, "message":"Success"})
	}
	else {
		res.json({"error":true, "message":"User not found"});
	}*/
}<file_sep>/routes/index.js
// Get all of our friend data
//var fullData = require('../static_json/data.json');
//exports.fullData = fullData;
//var achievements = require('./achievements').achievements;
// Tracks which home-page variant (A/B layout experiment) is currently
// active, so "/" can redirect to it; shared mutable module state that other
// routes read via `require('./index').currentHomePage`.
var currentHomePage = {"current": "home"};
exports.currentHomePage = currentHomePage;
//require DB model
var models = require('../models');
exports.view = function(req, res){
if (currentHomePage.current == "homeHistoryOnBottom") {
res.redirect('/homeHistoryOnBottom');
}
else if (currentHomePage.current == "homeDeleteCause") {
res.redirect('/homeDeleteCause');
}
else {
res.redirect('/home');
}
var fullData = req.fullData;
res.render('index', {
'page_home' : 1
});
// for (var i=0; i < fullData.my_causes.length; ++i) {
// var cause = fullData.my_causes[i];
// if (cause.percentage == 100) {
// cause['color'] = "success";
// }
// else if (cause.percentage <= 40) {
// cause['color'] = "danger";
// }
// else {
// cause['color'] = "warning";
// }
// //progress-empty is used to set color of progressbar text to black instead of white for visibility
// if (parseInt(cause.percentage) <= 28) {
// cause['progress-empty'] = true;
// }
// else {
// cause['progress-empty'] = false;
// }
// }
// console.log(fullData);
};
//Version to permit to come back to original homepage
exports.home = function(req, res) {
//if we get here, we will remain with this as homepage
//so we set current homepage to redirect here from every other link
currentHomePage.current = "home";
//Verify login here
if (req.redirect) {
res.redirect('/login');
}
//Get my_causes
// models.MyCause
// .find({
// "id_user" : req.user._id
// })
// .populate("charity")
// .exec(function(err, my_causes) {
// models.Charity
// .find()
// .exec(function(err, charities) {
// if (err) {
// console.log("Home: error on loading myCauses = " + err);
// res.send(500);
// }
// else {
// console.log("MYCAUSES: " + my_causes);
//console.log("CHARITIES: " + charities);
var fullData = req.fullData;
res.render('index', {
'page_home' : 1
});
// for (var i=0; i < fullData.my_causes.length; ++i) {
// var cause = fullData.my_causes[i];
// if (cause.percentage == 100) {
// cause['color'] = "success";
// }
// else if (cause.percentage <= 40) {
// cause['color'] = "danger";
// }
// else {
// cause['color'] = "warning";
// }
// //progress-empty is used to set color of progressbar text to black instead of white for visibility
// if (parseInt(cause.percentage) <= 28) {
// cause['progress-empty'] = true;
// }
// else {
// cause['progress-empty'] = false;
// }
// }
// console.log(fullData);
// }
// });
};
//Alternate version with history button on bottom
exports.homeHistoryOnBottom = function(req, res) {
//if we get here, we will remain with this as homepage
//so we set current homepage to redirect here from every other link
currentHomePage.current = "homeHistoryOnBottom";
//Verify login here
if (req.redirect) {
res.redirect('/login');
}
var fullData = req.fullData;
//console.log(data);
res.render('index_history_bottom', {
'page_home' : 1
});
// for (var i=0; i < fullData.my_causes.length; ++i) {
// var cause = fullData.my_causes[i];
// if (cause.percentage == 100) {
// cause['color'] = "success";
// }
// else if (cause.percentage <= 40) {
// cause['color'] = "danger";
// }
// else {
// cause['color'] = "warning";
// }
// //progress-empty is used to set color of progressbar text to black instead of white for visibility
// if (parseInt(cause.percentage) <= 28) {
// cause['progress-empty'] = true;
// }
// else {
// cause['progress-empty'] = false;
// }
// }
// console.log(fullData);
};
//Alternate version with delete cause option
exports.homeDeleteCause = function(req, res) {
//if we get here, we will remain with this as homepage
//so we set current homepage to redirect here from every other link
currentHomePage.current = "homeDeleteCause";
//Verify login here
if (req.redirect) {
res.redirect('/login');
}
var fullData = req.fullData;
//console.log(data);
res.render('index_delete_cause', {
'page_home' : 1
});
// for (var i=0; i < fullData.my_causes.length; ++i) {
// var cause = fullData.my_causes[i];
// if (cause.percentage == 100) {
// cause['color'] = "success";
// }
// else if (cause.percentage <= 40) {
// cause['color'] = "danger";
// }
// else {
// cause['color'] = "warning";
// }
// //progress-empty is used to set color of progressbar text to black instead of white for visibility
// if (parseInt(cause.percentage) <= 28) {
// cause['progress-empty'] = true;
// }
// else {
// cause['progress-empty'] = false;
// }
// }
// console.log(fullData);
};
exports.addMoneyToCause = function(req, res) {
    // Add money to one of the user's active causes, then credit the same
    // amount to the user's running balance and redirect back.
    // URL params: id_cause    - index into req.fullData.my_causes (not a DB id)
    //             amountToAdd - dollar amount to add
    //             source      - "index" redirects home, else to the detail page
    //res.render('empty');
    var fullData = req.fullData;
    var id_cause = req.params.id_cause;
    var amountToAdd = parseFloat(req.params.amountToAdd);
    //check if money exceeds cause's cost
    var cause = fullData.my_causes[id_cause];
    var cost = parseFloat(cause.charity.cost);
    var money_saved = parseFloat(cause.money_saved);
    if (money_saved + amountToAdd >= cost) {
        // Saving target reached: cap at the full cost and mark the cause done.
        money_saved = cost;
        cause.percentage = 100;
        cause.finished = true;
    }
    else {
        money_saved = money_saved + amountToAdd;
        cause.money_saved = money_saved;
        cause.percentage = Math.floor(money_saved / cost * 100);
    }
    //Since we added money, we set achievement
    //achievements[2].completed = true;
    //Update user
    // Persist the cause progress (amounts are stored in cents, hence * 100)
    // and award achievement #2 at most once via $addToSet.
    models.User.findOneAndUpdate(
        { "_id" : req.user._id, "my_causes._id" : cause._id},
        {$set: { "my_causes.$.money_saved" : money_saved * 100, "my_causes.$.percentage" : cause.percentage, "my_causes.$.finished" : cause.finished },
        $addToSet : {"achievements" : 2} },
        function(err, result) {
            if (err) {
                console.log("Home: error on addMoneyToCause while updating user = " + err);
            }
            //calculate new balance
            // NOTE(review): the balance is *increased* here, so "balance"
            // appears to track total money set aside rather than spendable
            // funds — confirm intended semantics.
            var new_balance = parseFloat(req.user['balance']) + amountToAdd;
            //Update user balance
            models.User
            .find({
                "_id" : req.user._id
            })
            .update({
                "balance" : new_balance * 100
            })
            .exec(function(err) {
                if (err) {
                    console.log("Home: error on addMoneyToCause while updating balance = " + err);
                }
                //In any case, redirect
                if (req.params.source == "index") {
                    res.redirect('/');
                }
                else {
                    res.redirect('/my_cause_detail/' + id_cause);
                }
            });
        }
    );
    // var new_balance = parseFloat(user['balance'] != "" ? user['balance'] : "0.00") + amountToAdd;
    // user['balance'] = new_balance.toFixed(2).toString();
    // console.log(fullData);
    // console.log(user);
};
exports.donateToCause = function(req, res) {
    // Donate the full cost of a cause's charity: deduct it from the user's
    // balance, append the charity to the donation history, award
    // achievement #3, then redirect to remove the cause from the active list.
    //res.render('empty');
    var fullData = req.fullData;
    var id_cause = req.params.id_cause;
    var charity = fullData.my_causes[id_cause].charity;
    //add to history
    //fullData.history.push({"charity":charity});
    //we donated, so set achievement
    //achievements[3].completed = true;
    //deduct money from user balance
    //calculate new balance
    var new_balance = parseFloat(req.user['balance']) - parseFloat(charity.cost);
    //Update user balance
    // Single update: new balance (stored in cents, hence * 100), a history
    // entry, and achievement #3 (added at most once via $addToSet).
    models.User
    .find({
        "_id" : req.user._id
    })
    .update({
        "balance" : new_balance * 100, //update balance
        $push : {"history" : {"charity" : charity._id } },
        $addToSet : {"achievements" : 3}
    })
    .exec(function(err) {
        if (err) {
            console.log("Home: error on donateToCause while updating balance = " + err);
        }
        //In any case, redirect
        //delete from my causes
        res.redirect('/my_cause_detail/deleteCause/' + id_cause);
    });
    // var new_balance = parseFloat(user['balance'] != "" ? user['balance'] : "0.00") - parseFloat(charity.cost);
    // user['balance'] = "" + new_balance.toFixed(2);
};
<file_sep>/loadAchievements.js
/*
This script will initialize a local Mongo database
on your machine so you can do development work.
IMPORTANT: You should make sure the
local_database_name
variable matches its value in app.js Otherwise, you'll have
initialized the wrong database.
*/
var mongoose = require('mongoose');
var models = require('./models');
// Connect to the Mongo database, whether locally or on Heroku
// NOTE(review): this connection string embeds credentials and is committed
// to source control — move it into an environment variable.
var local_database_uri = 'mongodb://caritydb:Carity%69!<EMAIL>:19648/heroku_g8zckv7m';
var database_uri = process.env.MONGOLAB_URI || local_database_uri;
mongoose.connect(database_uri);
// Do the initialization here
// Step 1: load the JSON data
var achievements = require('./static_json/achievements.json');
// Step 2: Remove all existing documents
models.Achievement
.find()
.remove()
.exec(onceClear); // callback to continue at
// Step 3: load the data from the JSON file
// Re-creates one Achievement per JSON entry (array index becomes the
// document _id), then resets every user to only the first achievement and
// closes the connection once the last save completes.
function onceClear(err) {
    if(err) console.log(err);
    // loop over the projects, construct and save an object from each one
    // Note that we don't care what order these saves are happening in...
    var to_save_count = achievements.length;
    for(var i=0; i<achievements.length; i++) {
        var json = achievements[i];
        var proj = new models.Achievement({
            "_id" : i,
            "name" : json['name'],
            "description" : json['description']
        });
        proj.save(function(err, proj) {
            if(err) console.log(err);
            // Countdown over the async saves; the last one to finish wins.
            to_save_count--;
            console.log(to_save_count + ' left to save');
            if(to_save_count <= 0) {
                console.log('DONE');
                //Set all Users to have only first achievement
                models.User.find().update({"achievements" : [0]}).exec(function(err, result) {
                    // The script won't terminate until the
                    // connection to the database is closed
                    mongoose.connection.close()
                });
            }
        });
    }
}
<file_sep>/public/js/cause_detail.js
'use strict';
// Call this function when the page loads (the "ready" event)
// Run page initialization once the DOM is ready.
$(document).ready(function() {
    initializePage();
})
/*
* Function that is called when the document is ready.
*/
/*
 * Wires up the cause-detail page controls: the add-to-my-causes buttons and
 * the favorite toggle. Relies on page-level globals `id_cause` and `session`
 * injected by the template — TODO confirm they are always defined here.
 */
function initializePage() {
    //To prevent buttons from staying focused after press
    $(".btn").mouseup(function(){
        $(this).blur();
    });
    // "Add to my causes" when coming from the charities list.
    $('.add-button').click(function(e) {
        $.get("/charities/add_my_cause", {'source':'charities', 'index_charity': id_cause}).done(function(data){
            if (data['result'] == "full") {
                // Server reported the my-causes list is full: show the modal.
                $('#addMyCauseFull').modal("show");
            }
            else {
                location.reload();
            }
        });
    });
    // "Add to my causes" when coming from the favorites list.
    $('.add-button-favorites').click(function(e) {
        var id = $(this).closest('.charity').attr('id').substr('charity'.length);
        console.log(id);
        $.get("/charities/add_my_cause", {'source':'saved_causes', 'index_charity': id_cause}).done(function(data){
            if (data['result'] == "full") {
                $('#addMyCauseFull').modal("show");
            }
            else {
                location.reload();
            }
        });
    });
    // Toggle the favorite flag; source/action tell the server which view we
    // came from and whether to add or remove.
    $('.favorite-button').click(function(e) {
        if(session.showMyFavorites == "favorites") {
            if ($(this).hasClass('favorite-button-active')) {
                $.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"remove", "index_charity": id_cause}).done(function(data) {
                    // Removed while viewing favorites: go back to the list.
                    window.open('/charities', "_self");
                });
            }
            else {
                $.get('/charities/toggle_favorite', {"source": "saved_causes", "action":"add", "index_charity": id_cause}).done(function(data) {
                    location.reload();
                });
            }
        }
        else {
            if ($(this).hasClass('favorite-button-active')) {
                $.get('/charities/toggle_favorite', {"source": "charities", "action":"remove", "index_charity": id_cause}).done(function(data) {
                    location.reload();
                });
            }
            else {
                $.get('/charities/toggle_favorite', {"source": "charities", "action":"add", "index_charity": id_cause}).done(function(data) {
                    location.reload();
                });
            }
        }
    })
}<file_sep>/models.js
var Mongoose = require('mongoose');
exports.Mongoose = Mongoose;
var Schema = Mongoose.Schema;
//User Model
// Schema for an application user. Monetary amounts ("balance",
// "my_causes.money_saved") are stored as integer cents and converted to
// dollar strings on read by the getInDollars getter below.
var UserSchema = new Schema({
    "firstname" : String,
    "lastname" : String,
    "email" : String,
    "password" : String, // NOTE(review): no hashing visible here — confirm it happens before save
    "bank_account" : { type : String, default : "", get : obfuscate}, // masked on read
    "balance" : { type : Number, default : 0, get : getInDollars},
    "notifications" : { type : Boolean, default : true },
    // Causes the user is currently saving money for.
    "my_causes" : [ {
        "charity" : { type : Schema.Types.ObjectId, "ref" : "Charity" },
        "percentage" : { type : Number, default : 0 }, // 0-100 progress toward the charity's cost
        "money_saved" : { type : Number, default : 0, get: getInDollars },
        "finished" : { type : Boolean, default : false }
    }
    ],
    "favorites" : [ { type : Schema.Types.ObjectId, "ref" : "Charity" } ],
    // Completed donations, one entry per donated charity.
    "history" : [ { "charity" : { type : Schema.Types.ObjectId, "ref" : "Charity" } } ],
    // Numeric ids of earned achievements; unique/dropDups guard duplicates.
    "achievements" : [ { type : Number, "ref" : "Achievement", unique : true, dropDups : true } ],
    "enabled" : { type : Boolean, default : true }
});
//Function to convert from cents (as stored in DB) to dollars
// Mongoose getter: convert an integer cents value (as stored in the DB) to
// a two-decimal dollar string.
// Fix: the original returned the NUMBER 0 for non-positive balances but a
// STRING ("12.34") otherwise, leaking an inconsistent type to templates;
// now every path returns a formatted string.
function getInDollars(balance) {
    return (balance > 0 ? balance / 100 : 0).toFixed(2);
}
//Function to obfuscate a user bank_account with XXXX
// Mask a stored bank account number, keeping only its last three characters.
// Values shorter than three characters (or empty) come back empty.
function obfuscate(bank_account) {
    if (bank_account == "" || bank_account.length < 3) {
        return "";
    }
    return "XXXXXXXX " + bank_account.substr(-3);
}
exports.User = Mongoose.model('User', UserSchema);
//Charity Model
// Schema for a charity/cause that users can save toward.
var CharitySchema = new Schema({
    "name" : String,
    "action" : String,
    "description" : String,
    "cost" : { type : Number, get : getInDollars }, // stored in cents, read as a dollar string
    "image" : String,
    "url" : String,
    "enabled" : { type : Boolean, default : true }
});
exports.Charity = Mongoose.model('Charity', CharitySchema);
//Achievement Model
// Schema for an achievement. Uses an explicit numeric _id (assigned by the
// loader script from the JSON array index) instead of an ObjectId.
var AchievementSchema = new Schema({
    "_id" : Number,
    "name" : String,
    "description" : String
});
exports.Achievement = Mongoose.model('Achievement', AchievementSchema);
//UserCharity Model (Charities list connected to user to know whether user has it in my_causes or favorites)
// var UserCharity = new Mongoose.Schema({
// "charity" :
// });
//MyCause Model
// var MyCauseSchema = new Schema({
// "id_user" : { type : Schema.Types.ObjectId },
// "charity" : { type : Schema.Types.ObjectId, ref : 'Charity' },
// "percentage" : { type : Number, default : 0 },
// "money_saved" : { type : Number, default : 0, get: getInDollars },
// "finished" : { type : Boolean, default : false }
// });
// exports.MyCause = Mongoose.model('MyCause', MyCauseSchema);
<file_sep>/routes/settings.js
//var fullData = require('./index').fullData;
//require DB model
var models = require('../models');
exports.view = function(req, res) {
//check if causes active
var fullData = req.fullData;
var size = fullData.my_causes.length;
var active = false;
for (var i=0; i<size; ++i) {
if (fullData['my_causes'][i].hasOwnProperty('charity')) {
//found an active cause.
active = true;
break;
}
}
res.render('settings', {
"page_settings" : 1,
"active_causes" : active
});
}
exports.transferToBank = function(req, res) {
    // "Transfer" the whole balance out to the user's bank account: the
    // stored balance is simply zeroed (no real payment integration here).
    //res.render('empty');
    //Update user balance
    models.User
    .find({
        "_id" : req.user._id
    })
    .update({
        "balance" : 0
    })
    .exec(function(err) {
        if (err) {
            console.log("Settings: error on transferToBank = " + err);
        }
        //In any case, redirect to settings.
        res.redirect('/settings');
    });
    //user['balance'] = "";
}
exports.addBankAccount = function(req, res) {
    // Store the bank account number posted from the settings form.
    // NOTE(review): the value is saved without any server-side validation —
    // confirm whether format checks are done client-side.
    //res.render('empty');
    //Update user bank account
    models.User
    .find({
        "_id" : req.user._id
    })
    .update({
        "bank_account" : req.body.bank_account
    })
    .exec(function(err) {
        if (err) {
            console.log("Settings: error on addBankAccount = " + err);
        }
        //In any case, redirect to settings.
        res.redirect('/settings');
    });
    //user['bank_account'] = "XXXXXXXX " + req.body.bank_account.substr(-3);
    //res.redirect('/settings');
}
exports.deleteBankAccount = function(req, res) {
    // Remove the stored bank account; the balance is forfeited (reset to 0)
    // at the same time.
    //res.render('empty');
    //Delete user bank account
    models.User
    .find({
        "_id" : req.user._id
    })
    .update({
        "bank_account" : "",
        "balance" : 0
    })
    .exec(function(err) {
        if (err) {
            console.log("Settings: error on deleteBankAccount = " + err);
        }
        //In any case, redirect to settings.
        res.redirect('/settings');
    });
    // user['bank_account'] = "";
    // user['balance'] = "";
    // res.redirect('/settings');
}<file_sep>/routes/history.js
//var fullData = require('./index').fullData;
//var user = require('./index').user;
exports.view = function(req, res) {
    // Render the donation history page; `empty` lets the template show a
    // placeholder when the user has not donated to anything yet.
    var fullData = req.fullData;
    var empty = false;
    if (fullData['history'].length == 0) {
        empty = true;
    }
    res.render('history', {
        "empty" : empty
    });
}<file_sep>/routes/achievements.js
// var achievements = require('../static_json/achievements.json');
// exports.achievements = achievements;
//require DB model
var models = require('../models');
exports.view = function(req, res) {
//Load achievments
models.Achievement
.find()
.lean()
.exec(function(err, achievements) {
if (err) {
console.log("Achievements: error on loadAchievements = " + err);
res.send(500);
}
else {
for (var i=0; i<achievements.length; ++i) {
var achievement = achievements[i];
if ( req.user.achievements.indexOf(achievement._id) != -1) {
//Ach found, mark as completed
achievement['completed'] = true;
}
else {
achievement['completed'] = false;
}
}
res.render('achievements', {
"page_achievements" : 1,
"achievements" : achievements
});
console.log(achievements);
}
});
} | c9415ca0b162e1e0ef78b505839bba006a2debe0 | [
"Markdown",
"JavaScript"
] | 17 | Markdown | santana69/cse170-group2 | da27a4cc6b1bf9f20c0e3353e10c1f669195da81 | 7de3fed4242a76ec9bbd83857b2bee2739eff900 |
refs/heads/master | <file_sep>package Datagrams;
import java.net.*;
public class TheServer {
    /** Port this server binds its DatagramSocket to. */
    public static int serverPort = 998;
    /** Port on localhost that outgoing datagrams are sent to. */
    public static int clientPort = 999;
    public static int buffer_size = 1024;
    public static DatagramSocket ds;
    public static byte buffer[] = new byte[buffer_size];

    /**
     * Pumps console input to UDP: each newline-terminated line is sent as
     * one datagram to clientPort on the local host. Returns on end of
     * input (-1). Despite the constructor-like name, this is a plain
     * static method.
     * NOTE(review): a line longer than buffer_size overflows the buffer —
     * unhandled in the original and left as-is here.
     */
    public static void TheServer() throws Exception {
        int pos=0;
        while (true) {
            int c = System.in.read();
            switch (c) {
                case -1:
                    System.out.println("Сервер завершил работу.");
                    return;
                case '\r':
                    // Swallow CR so Windows line endings don't end up in the payload.
                    break;
                case '\n':
                    // Line complete: ship the buffered bytes and start over.
                    ds.send(new DatagramPacket(buffer,pos,InetAddress.getLocalHost(),clientPort));
                    pos=0;
                    break;
                default:
                    buffer[pos++] = (byte) c;
            }
        }
    }

    public static void main(String[] args) throws Exception{
        ds = new DatagramSocket(serverPort);
        try {
            TheServer();
        } finally {
            // Fix: the socket was never released; close it on exit or error.
            ds.close();
        }
    }
}
<file_sep>import java.util.Arrays;
import java.util.regex.*;
public class Main {
    /*
     * Example: checking whether a string matches a pattern.
     */
    private static void example(){
        Pattern pattern = Pattern.compile("ITIS");
        Matcher matcher = pattern.matcher("This is ITIS!");
        System.out.print("Find: ");
        // Search for the pattern anywhere in the string (substring search)
        if(matcher.find()){
            System.out.println(matcher.group()); // Print the matched text
        }else{
            System.out.println("Not matched");
        }
        System.out.print("Matches: ");
        // Strict check: the whole string must match the pattern
        if(matcher.matches()){
            System.out.println(matcher.group()); // Print the matched text
        }else{
            System.out.println("Not matched");
        }
    }
    /*
     * Find every match of patternString inside text, printing the last
     * captured group of each match. Returns true if anything matched.
     */
    private static boolean validator (String patternString, String text){
        try{
            Pattern pattern = Pattern.compile(patternString);
            Matcher matcher = pattern.matcher(text);
            boolean result = false;
            System.out.println("String: " + text + "\nPattern:" + patternString);
            // Find the next match
            while (matcher.find()){
                result = true;
                int count;
                // Number of capturing groups in the pattern
                if((count = matcher.groupCount()) > 0)
                    // group()     - the matched text
                    // group(0)    - the whole match
                    // group(int N) - the N-th captured group (text in parentheses)
                    System.out.println("Count:" + count + "\nMatched String:" + matcher.group(count));
            }
            return result;
        } catch (PatternSyntaxException e) {
            System.out.println("Wrong regexp pattern");
            return false;
        }
    }
    /*
     * Find matches and print the text with all of them replaced.
     * NOTE(review): replaceAll() inside the while(find()) loop resets the
     * matcher and re-processes the whole input each iteration — verify
     * this prints the intended number of times.
     */
    private static boolean replacer (String patternString, String text, String replaceTo){
        try{
            Pattern pattern = Pattern.compile(patternString);
            Matcher matcher = pattern.matcher(text);
            boolean result = false;
            System.out.println("String: " + text + "\nPattern:" + patternString);
            while (matcher.find()){
                System.out.println("New String: " + matcher.replaceAll(replaceTo));
                result = true;
            }
            return result;
        } catch (PatternSyntaxException e) {
            System.out.println("Wrong regexp pattern");
            return false;
        }
    }
    /*
     * Split the string on the given delimiter pattern and print the parts.
     */
    private static boolean splitter (String splitterString, String text){
        try{
            Pattern pattern = Pattern.compile(splitterString);
            String[] words = pattern.split(text);
            System.out.println(Arrays.toString(words));
            return true;
        } catch (PatternSyntaxException e) {
            System.out.println("Wrong regexp pattern");
            return false;
        }
    }
    // Validates a 6-digit hex colour literal of the form #RRGGBB.
    private static boolean findHexColor(String hexColor){
        Pattern pattern = Pattern.compile("^#[A-Fa-f0-9]{6}$");
        Matcher matcher = pattern.matcher(hexColor);
        return matcher.matches();
    }
    public static void main(String[] args) {
        // Demo calls are left commented out; uncomment to experiment.
        // example();
        // System.out.println(validator("I(.*)S", "++ITIS++"));
        // System.out.println(validator("I(.*)S", "IT IS ITIS!"));
        // System.out.println(replacer("I(.*)S", "What is it? IT IS ITIS!", "-"));
        // System.out.println(splitter(" ", "What is it? IT IS ITIS!"));
        // System.out.println(splitter("I(.*)S", "What is it? IT IS ITIS!"));
        // System.out.println(findHexColor("#FFEE00"));
        // System.out.println(findHexColor("#FE0"));
        // System.out.println(findHexColor("12345F"));
    }
}
<file_sep>Hello!
A repository with educational materials for the Java Programming course taught to ITIS students (Kazan, Russia).
<file_sep>package CalcPi;
/**
* Example 4. Pi
*/
import javax.swing.*;
class PI extends Thread
{
    // Bounds of the chunk currently being integrated.
    int from,to ;
    // n: total number of integration steps; np: number of chunks.
    static int n =72000000,np=4;
    double h,sum,x;
    // Running total of the approximation; static, so shared by all PI instances.
    static double ssum = 0.0 ;
    static int j;
    JTextArea display;
    public PI(JTextArea display ){
        j = 0 ;
        this.display = display;
    }
    // Integrand 4/(1+a^2): its integral over [0,1] equals PI.
    public double f(double a)
    {
        return(4.0 / (1.0 + a*a));
    }
    // Fold one chunk's partial sum into the shared total and show it.
    // NOTE(review): synchronized on the instance while mutating STATIC
    // state — two PI instances would not exclude each other. Also setText
    // is invoked off the Swing EDT. Confirm only one worker runs at a time.
    synchronized void count()
    {
        j = j + 1 ;
        ssum += h * sum ;
        display.setText(String.valueOf(ssum));
    }
    // Computes all np chunks sequentially in this single thread, pausing
    // one second after each so the display visibly converges toward PI.
    public void run()
    {
        for (int k=0; k<np; k++)
        {
            from = k * n / np;
            to = (k + 1) * n / np;
            h = 1.0 / (double) n;
            sum = 0.0;
            for (int i=from; i<to; i++)
            {
                x = h * ((double) i - 0.5);
                sum += f(x);
            }
            count() ;
            try{
                sleep(1000);
            }catch (InterruptedException e){
            }
        }
    }
}<file_sep>import javax.xml.stream.*;
import java.io.*;
public class StAXExample {
    public static void main(String[] args) {
        readerXML();
    }

    /**
     * Streams through app.xml with the StAX cursor API, printing an event
     * trace (document start, element names, text values).
     */
    static void readerXML(){
        try{
            XMLInputFactory factory = XMLInputFactory.newInstance();
            // Fix: keep a handle on the stream so it can be closed —
            // XMLStreamReader.close() does not close the underlying stream.
            FileInputStream fis = new FileInputStream("app.xml");
            try {
                XMLStreamReader r = factory.createXMLStreamReader("app.xml", fis);
                while (r.hasNext()) {
                    parseEvent(r);
                    r.next();
                }
                r.close();
            } finally {
                fis.close();
            }
        }catch(Exception e){
            // Fix: was an empty catch silently swallowing every failure
            // (including a missing app.xml); at least report it.
            e.printStackTrace();
        }
    }

    /**
     * Prints a one-line description of the reader's current event.
     */
    static void parseEvent(XMLStreamReader reader) {
        switch (reader.getEventType()) {
            case XMLStreamConstants.START_DOCUMENT:
                System.out.println("Start of document");
                break;
            case XMLStreamConstants.START_ELEMENT:
                System.out.println("Start element = " + reader.getLocalName());
                break;
            case XMLStreamConstants.CHARACTERS:
                // Trim whitespace-only text nodes (indentation between tags).
                int beginIndex = reader.getTextStart();
                int endIndex = reader.getTextLength();
                String value = new String(reader.getTextCharacters(),
                        beginIndex,
                        endIndex).trim();
                if (!value.equalsIgnoreCase(""))
                    System.out.println("Value = " + value);
                break;
            case XMLStreamConstants.END_ELEMENT:
                System.out.println("End element = " + reader.getLocalName());
                break;
            case XMLStreamConstants.COMMENT:
                if (reader.hasText())
                    System.out.print(reader.getText());
                break;
        }
    }

    /**
     * Writes a small sample document to out.xml with the StAX writer API.
     */
    static void createXML(){
        try{
            FileOutputStream fos = new FileOutputStream("out.xml");
            try {
                XMLOutputFactory output = XMLOutputFactory.newInstance();
                XMLStreamWriter writer = output.createXMLStreamWriter(fos);
                writer.writeStartDocument();
                writer.writeStartElement("document");
                writer.writeStartElement("data");
                writer.writeAttribute("name", "value");
                writer.writeCharacters("Hello World!");
                writer.writeEndElement();
                writer.writeEndElement();
                writer.writeEndDocument();
                writer.flush();
                writer.close();
            } finally {
                // Fix: the output stream leaked when the writer failed.
                fos.close();
            }
        }catch(Exception e){
            // Fix: was an empty catch silently swallowing every failure.
            e.printStackTrace();
        }
    }
}
<file_sep>import java.io.*;
import java.util.*;
import javax.xml.parsers.*;
import org.w3c.dom.*;
public class XMLTest {
    private static final String SPACE = " ";
    /**
     * Returns a Document object, the in-memory DOM representation of the
     * XML document app.xml.
     */
    private static Document getDocument() throws Exception {
        try {
            DocumentBuilderFactory f = DocumentBuilderFactory.newInstance();
            f.setValidating(false);
            DocumentBuilder builder = f.newDocumentBuilder();
            return builder.parse(new File("app.xml"));
        } catch (Exception exception) {
            String message = "XML parsing error!";
            throw new Exception(message);
        }
    }
    /**
     * Walks the application -> class -> method tree and prints an
     * indented outline of it.
     */
    private static void showDocument(Document doc) {
        StringBuffer content = new StringBuffer();
        Node node = doc.getChildNodes().item(0);
        ApplicationNode appNode = new ApplicationNode(node);
        content.append("Application \n");
        List<ClassNode> classes = appNode.getClasses();
        for (int i = 0; i < classes.size(); i++) {
            ClassNode classNode = classes.get(i);
            content.append(SPACE + "Class: " + classNode.getName() + " \n");
            List<MethodNode> methods = classNode.getMethods();
            for (int j = 0; j < methods.size(); j++) {
                MethodNode methodNode = methods.get(j);
                content.append(SPACE + SPACE + "Method: "
                        + methodNode.getName() + " \n");
            }
        }
        System.out.println(content.toString());
    }
    public static void main(String[] args) {
        try{
            Document doc = getDocument();
            showDocument(doc);
        }catch(Exception e) {
            // NOTE(review): failures are silently ignored here.
        }
    }
    /**
     * Object view of the application (the root element).
     */
    public static class ApplicationNode {
        Node node;
        public ApplicationNode(Node node) {
            this.node = node;
        }
        /**
         * Returns one ClassNode per child "class" element of the
         * application element.
         */
        public List<ClassNode> getClasses() {
            ArrayList<ClassNode> classes = new ArrayList<ClassNode>();
            NodeList classNodes = node.getChildNodes();
            for (int i = 0; i < classNodes.getLength(); i++) {
                Node node = classNodes.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    // Wrap the DOM node in our own object view of a class.
                    ClassNode classNode = new ClassNode(node);
                    classes.add(classNode);
                }
            }
            return classes;
        }
    }
    /**
     * Object view of a class element.
     */
    public static class ClassNode {
        Node node;
        /**
         * Creates a new instance from the given DOM node.
         */
        public ClassNode(Node node) {
            this.node = node;
        }
        /**
         * Returns one MethodNode per child "method" element of this class
         * element.
         */
        public List<MethodNode> getMethods() {
            ArrayList<MethodNode> methods = new ArrayList<MethodNode>();
            NodeList methodNodes = node.getChildNodes();
            for (int i = 0; i < methodNodes.getLength(); i++) {
                // Fix: the original assigned to the FIELD "node" here (no
                // local declaration), clobbering this ClassNode's element —
                // any getName() call after getMethods() then read the last
                // child's attributes instead of the class's.
                Node node = methodNodes.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    MethodNode methodNode = new MethodNode(node);
                    methods.add(methodNode);
                }
            }
            return methods;
        }
        /**
         * Returns the class name (the "name" attribute).
         */
        public String getName() {
            NamedNodeMap attributes = node.getAttributes();
            Node nameAttrib = attributes.getNamedItem("name");
            return nameAttrib.getNodeValue();
        }
    }
    /**
     * Object view of a method element.
     */
    public static class MethodNode {
        Node node;
        /**
         * Creates a new instance from the given DOM node.
         */
        public MethodNode(Node node) {
            this.node = node;
        }
        /**
         * Returns the method name (the "name" attribute).
         */
        public String getName() {
            NamedNodeMap attributes = node.getAttributes();
            Node nameAttrib = attributes.getNamedItem("name");
            return nameAttrib.getNodeValue();
        }
    }
}<file_sep>import java.net.*;
import java.io.*;
import java.util.Date;
class UCDemo
{
    /**
     * Opens a URLConnection to http://ya.ru and prints the response
     * metadata (date, content type, expiration, last-modified, length)
     * followed by the body. Console output strings are intentionally kept
     * exactly as in the original (Russian).
     */
    public static void main(String args[]) throws Exception {
        int c;
        URL hp = new URL("http://ya.ru");
        URLConnection hpCon = hp.openConnection();
        // Response date
        long d = hpCon.getDate();
        if(d==0)
            System.out.println("Нет информации о дате.");
        else
            System.out.println("Дата: " + new Date(d));
        // Content type
        System.out.println("Тип содержимого: " + hpCon.getContentType());
        // Expiration date
        d = hpCon.getExpiration();
        if(d==0)
            System.out.println("Нет информации о сроке действия.");
        else
            System.out.println("Устареет: " + new Date(d));
        // Last-modified date
        d = hpCon.getLastModified();
        if(d==0)
            System.out.println("Нет информации о дате последней модификации.");
        else
            System.out.println("Дата последней модификации: " + new Date(d));
        // Content length (-1 when the server does not report it)
        int len = hpCon.getContentLength();
        if(len == -1)
            System.out.println("Длина содержимого недоступна.");
        else
            System.out.println("Длина содержимого: " + len);
        if(len != 0) {
            System.out.println("=== Содержимое ===");
            InputStream input = hpCon.getInputStream();
            try {
                // Fix: dropped the unused local "i" and close the stream
                // even when reading throws.
                while ((c = input.read()) != -1) {
                    System.out.print((char) c);
                }
            } finally {
                input.close();
            }
        } else {
            System.out.println("Содержимое недоступно.");
        }
    }
}
<file_sep>package CalcPi;
/**
* Example 4. Pi
*/
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
public class CalcPi extends JFrame {
    // Output area for the running PI approximation and the launch button.
    JTextArea display= new JTextArea(1, 20);
    JPanel buttonPanel = new JPanel(new GridLayout(3,1));
    JButton button0 = new JButton("Calculate PI");

    /**
     * Builds the window and wires the button to start a background PI
     * worker (see PI.run()) that writes its running total into display.
     */
    CalcPi() {
        super("PI Calculator");
        setBounds(300, 300, 300, 300);
        button0.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Each click spawns a fresh worker thread.
                PI pi = new PI(display);
                pi.start();
            }
        });
        setLayout(new BorderLayout());
        // Добавление элементов на форму -> Add the widgets to the frame
        add(display, BorderLayout.NORTH);
        add(buttonPanel,BorderLayout.CENTER);
        // Добавление кнопок на панель -> Add the buttons to the panel
        buttonPanel.add(new JPanel());
        buttonPanel.add(button0);
        setVisible(true);
    }
    public static void main(String[] args) {
        new CalcPi();
    }
}
<file_sep>import java.awt.*;
import java.awt.event.*;
import java.lang.*;
import java.util.*;
class View implements Observer {
    // Text field showing the current counter value.
    private TextField myTextField;
    private Button button;

    /**
     * Builds the AWT window: label, counter field, and the button the
     * controller is later attached to via addController().
     */
    View() {
        System.out.println("View()");
        Frame frame = new Frame("simple MVC");
        frame.add("North", new Label("counter"));
        myTextField = new TextField();
        frame.add("Center", myTextField);
        Panel panel = new Panel();
        button = new Button("PressMe");
        panel.add(button);
        frame.add("South", panel);
        frame.addWindowListener(new CloseListener());
        frame.setSize(200,100);
        frame.setLocation(100,100);
        frame.setVisible(true);
    }
    /**
     * Observer callback: the model pushes its new Integer value here.
     */
    public void update(Observable obs, Object obj) {
        myTextField.setText("" + ((Integer)obj).intValue());
    }
    public void setValue(int v){
        myTextField.setText("" + v);
    }
    /**
     * Registers the controller as the button's action listener.
     */
    public void addController(Controller controller){
        System.out.println("View : adding controller");
        button.addActionListener(controller);
    }
    /**
     * Exits the application when the window is closed.
     */
    public static class CloseListener extends WindowAdapter {
        public void windowClosing(WindowEvent e) {
            e.getWindow().setVisible(false);
            System.exit(0);
        }
    }
}<file_sep>package BankAccount;
/**
* Example 3. Bank Account (synchronized, locks, priority, yield)
*/
public class Main {
    // Shared transaction log; path is hard-coded (Logger is project-local).
    public static Logger logger = new Logger ("D:/Test/logger.txt");

    /**
     * Moves `money` between two distinct, still-running accounts under a
     * class-level lock, logging the transfer on success.
     * NOTE(review): the getBalance() check and the removeMoney() call are
     * separate steps; if BankAccount mutates its balance from its own
     * thread this is a check-then-act race — confirm BankAccount's own
     * synchronization.
     */
    public synchronized static boolean transaction(BankAccount fromAccount, BankAccount toAccount, double money){
        if(fromAccount.getId() != toAccount.getId()){
            if(fromAccount.isAlive() && toAccount.isAlive() && fromAccount.getBalance() >= money ){
                if(
                        fromAccount.removeMoney(money) &&
                        toAccount.addMoney(money)) {
                    logger.log("Transaction: User1 #" + fromAccount.userId + "; User2 #" + toAccount.userId + "; money " + money);
                    return true;
                }
            }
        }
        return false;
    }
    public static void main(String args[]) {
        logger.start();
        // Three account threads with different balances and priorities;
        // account2 gets the highest scheduling priority.
        BankAccount account1 = new BankAccount(1, 1800.00, logger);
        BankAccount account2 = new BankAccount(2, 0.00, logger);
        BankAccount account3 = new BankAccount(3, 1.00, logger);
        account1.setPriority(1);
        account2.setPriority(10);
        account3.setPriority(1);
        account1.start();
        account2.start();
        account3.start();
        transaction(account1, account2, 1700);
    }
}
}<file_sep>import java.io.*;
import java.util.Arrays;
public class Main {
    /**
     * Prints basic metadata for a path: size and mtime for files, a
     * listing for directories.
     */
    public static void showInfo(String fileName){
        File f = new File(fileName);
        if(f.exists()){
            System.out.println("Path: " + f.getPath());
            if(f.isFile()){
                System.out.println("File size: " + f.length());
                System.out.println("Last modified date: " + f.lastModified());
            }else if(f.isDirectory()){
                System.out.println("Directory Listing: " + Arrays.toString(f.list()));
            }
        }else{
            System.out.println("File does not exist");
        }
    }
    /**
     * Demonstrates byte-stream reading: first one byte at a time (each
     * char printed on its own line), then the whole file into a byte
     * array at once.
     */
    public static void readFile(String fileName){
        try{
            // read bytes
            FileInputStream f1 = new FileInputStream(fileName);
            System.out.println("Available : " + f1.available());
            while (f1.available() > 0){
                System.out.println((char) f1.read());
            }
            f1.close();
            // read bytes array
            FileInputStream f2 = new FileInputStream(fileName);
            int available = f2.available();
            System.out.println("Available : " + available);
            byte b[] = new byte[available];
            f2.read(b);
            System.out.println(new String(b, 0, available));
            f2.close();
        }catch (FileNotFoundException e){
            System.out.println("File Not Found");
        }catch (IOException e){
            System.out.println("Some I/O error");
        }
    }
    /**
     * Writes a fixed three-byte array to the file (overwriting it).
     */
    public static void writeFile(String fileName){
        try{
            byte[] bytesToWrite = {1, 2, 3};
            FileOutputStream f1 = new FileOutputStream(fileName);
            f1.write(bytesToWrite);
            f1.close();
            System.out.println("Bytes written: " + bytesToWrite.length);
        }catch (IOException e){
            System.out.println("Some I/O error");
        }
    }
    /**
     * Benchmarks buffered vs. unbuffered stream I/O on one million bytes.
     */
    public static void bufferedIO(String fileName){
        try {
            InputStream inStream = null;
            OutputStream outStream = null;
            // Write a number of bytes to the file (buffered)
            long timeStart = System.currentTimeMillis();
            outStream = new FileOutputStream(fileName);
            outStream = new BufferedOutputStream(outStream);
            for(int i=1000000; --i>=0;){
                outStream.write(i);
            }
            long time = System.currentTimeMillis() - timeStart;
            System.out.println("Writing time: " + time + " ms");
            outStream.close();
            // Measure the read time without buffering
            timeStart = System.currentTimeMillis();
            inStream = new FileInputStream(fileName);
            while(inStream.read()!=-1){
            }
            time = System.currentTimeMillis() - timeStart;
            inStream.close();
            System.out.println("Direct read time: " + (time) + " ms");
            // Same read, this time through a BufferedInputStream
            timeStart = System.currentTimeMillis();
            inStream = new FileInputStream(fileName);
            inStream = new BufferedInputStream(inStream);
            while(inStream.read()!=-1){
            }
            time = System.currentTimeMillis() - timeStart;
            inStream.close();
            System.out.println("Buffered read time: " + (time) + " ms");
        } catch (IOException e) {
            System.out.println("IOException: " + e.toString());
        }
    }
    /**
     * Demonstrates character-stream reading, direct and buffered.
     */
    public static void fileReaderDemo(String fileName){
        try{
            FileReader f1 = new FileReader(fileName);
            System.out.println("Direct file reader:");
            int c;
            while ((c = f1.read()) != -1){
                System.out.print((char) c);
            }
            f1.close();
            System.out.println("\nBuffered file reader:");
            FileReader f2 = new FileReader(fileName);
            BufferedReader f = new BufferedReader(f2);
            while ((c = f.read()) != -1){
                System.out.print((char) c);
            }
            f2.close();
        }catch (FileNotFoundException e){
            System.out.println("File Not Found");
        }catch (IOException e){
            System.out.println("Some I/O error");
        }
    }
    /**
     * Appends the given text to the file character by character.
     */
    public static void fileWriterDemo(String fileName, String input){
        try{
            char buffer[] = new char[input.length()];
            input.getChars(0, input.length(), buffer, 0);
            FileWriter f1 = new FileWriter(fileName, true);
            for(char item : buffer ){
                f1.append(item);
            }
            f1.close();
        }catch (IOException e){
            System.out.println("Some I/O error");
        }
    }
    /**
     * Demonstrates RandomAccessFile: print all lines, then seek to
     * offset 6 and overwrite four bytes in place.
     */
    public static void realAccess(String fileName){
        try{
            /*
            r - read
            rw - read/write
            rws - read/write synchronize content/metadata
            rwd - read/write synchronize content
            */
            RandomAccessFile f = new RandomAccessFile(fileName, "rwd");
            String str;
            while((str = f.readLine()) != null){
                System.out.println(str);
            }
            System.out.println(f.getFilePointer());
            f.seek(6);
            f.writeBytes("ITIS");
            f.close();
        }catch (IOException e){
            System.out.println("Some I/O error");
        }
    }
    public static void main(String[] args) {
        // Demo calls left commented out; uncomment individually to run.
        //showInfo("D:/Test");
        //showInfo("D:/Test/example.txt");
        //readFile("D:/Test");
        //readFile("D:/Test/example.txt");
        //writeFile("D:/Test/example2");
        //bufferedIO("D:/Test/m");
        //fileReaderDemo("D:/Test/example.txt");
        //fileWriterDemo("D:/Test/examplem.txt", "Hello World!\n");
        //realAccess("D:/Test/examplem.txt");
    }
}<file_sep>import test.ImdbEntity;
import java.math.BigDecimal;
import java.util.*;
import org.hibernate.*;
import org.hibernate.cfg.*;
import org.hibernate.metamodel.*;
import org.hibernate.service.*;
import org.hibernate.service.*;
/**
 * Hibernate CRUD demo against the ImdbEntity film table.
 * Boots a SessionFactory from hibernate.cfg.xml (Hibernate 4.x bootstrap
 * API), then creates, lists, updates and deletes one film.
 */
public class Main {
    private static SessionFactory sessionFactory;  // built once in main()
    private static ServiceRegistry serviceRegistry;

    public static void main(String[] args) {
        // Read hibernate.cfg.xml and build the session factory.
        Configuration configuration = new Configuration();
        configuration.configure();
        serviceRegistry = new ServiceRegistryBuilder().applySettings(configuration.getProperties()).buildServiceRegistry();
        sessionFactory = configuration.buildSessionFactory(serviceRegistry);
        // NOTE(review): updateFilm/deleteFilm receive the film NAME and pass
        // it to session.get() as the identifier — this only works if
        // ImdbEntity maps `name` as its id; confirm against the mapping.
        addFilm("СНЕГУРОЧКА", 1990, 9.9);
        listFilm();
        updateFilm("СНЕГУРОЧКА", 9.8);
        deleteFilm("СНЕГУРОЧКА");
    }

    /**
     * Persists a new film row inside its own transaction.
     *
     * @param name   film title
     * @param year   release year
     * @param rating rating, stored as BigDecimal
     * @return the generated identifier, or null if the insert failed
     */
    public static String addFilm(String name, int year, double rating){
        Session session = sessionFactory.openSession();
        Transaction tx = null;
        String filmID = null;
        try{
            tx = session.beginTransaction();
            ImdbEntity imdbObject = new ImdbEntity();
            imdbObject.setName(name);
            imdbObject.setYear(year);
            imdbObject.setRating(new BigDecimal(rating));
            // save() returns the generated id; cast assumes a String id.
            filmID = (String) session.save(imdbObject);
            tx.commit();
        }catch (HibernateException e) {
            if (tx!=null) tx.rollback();  // undo the partial transaction
            e.printStackTrace();
        }finally {
            session.close();              // always release the session
        }
        return filmID;
    }

    /** Prints every film with rating > 8.5 as "name (year) - rating". */
    public static void listFilm( ){
        Session session = sessionFactory.openSession();
        Transaction tx = null;
        try{
            tx = session.beginTransaction();
            // HQL query; despite the variable name, the list holds
            // ImdbEntity rows, not employees.
            List employees = session.createQuery("from ImdbEntity where rating>8.5").list();
            for (Iterator iterator =
                    employees.iterator(); iterator.hasNext();){
                ImdbEntity film = (ImdbEntity) iterator.next();
                System.out.println(film.getName() + " (" + film.getYear() + ") - " + film.getRating());
            }
            tx.commit();
        }catch (HibernateException e) {
            if (tx!=null) tx.rollback();
            e.printStackTrace();
        }finally {
            session.close();
        }
    }

    /**
     * Sets a new rating on the film with the given identifier.
     * NOTE(review): session.get() returns null for an unknown id, which
     * would NPE on setRating below — no null guard here.
     *
     * @param filmID identifier of the film to update
     * @param rating new rating value
     */
    public static void updateFilm(String filmID, double rating ){
        Session session = sessionFactory.openSession();
        Transaction tx = null;
        try{
            tx = session.beginTransaction();
            ImdbEntity film =
                    (ImdbEntity)session.get(ImdbEntity.class, filmID);
            film.setRating(new BigDecimal(rating));
            session.update(film);
            tx.commit();
        }catch (HibernateException e) {
            if (tx!=null) tx.rollback();
            e.printStackTrace();
        }finally {
            session.close();
        }
    }

    /**
     * Deletes the film with the given identifier.
     * NOTE(review): same missing null guard as updateFilm.
     *
     * @param filmID identifier of the film to delete
     */
    public static void deleteFilm(String filmID){
        Session session = sessionFactory.openSession();
        Transaction tx = null;
        try{
            tx = session.beginTransaction();
            ImdbEntity film =
                    (ImdbEntity)session.get(ImdbEntity.class, filmID);
            session.delete(film);
            tx.commit();
        }catch (HibernateException e) {
            if (tx!=null) tx.rollback();
            e.printStackTrace();
        }finally {
            session.close();
        }
    }
}
<file_sep>package BankAccount;
/**
* Example 3. Bank Account (synchronized, locks)
*/
/**
 * Example 3. Bank Account (synchronized, locks)
 *
 * An account whose balance is mutated only through synchronized methods,
 * plus a background thread that stays alive until deamonDie is set.
 */
public class BankAccount extends Thread{
    public int userId;                         // account owner's id
    private double balance = 0;                // current balance; guarded by the synchronized methods
    public volatile boolean deamonDie = false; // flag used to stop the thread
    public Logger logger;                      // shared log sink

    BankAccount(int userId, double startBalance, Logger logger){
        this.userId = userId;
        // Fixed: the original assigned startBalance twice in a row;
        // the duplicate line was removed.
        balance = startBalance;
        this.logger = logger;
        this.logger.log("User #" + userId + ": start balance: " + getBalance());
    }

    public void run(){
        System.out.println("Account " + userId + " is active");
        try{
            while(true){
                if(deamonDie){
                    break;
                }
                yield();    // hand control to another thread
                sleep(100);
                // Example of potential thread-synchronization problems:
                //logger.log("Thread for User #" + userId + " is Alive");
            }
        }catch (InterruptedException e){
        }
        System.out.println("Account " + userId + " closed");
    }

    /** Deposits a positive sum; returns false for sum <= 0. */
    public synchronized boolean addMoney(double sum){
        if(sum > 0){
            balance += sum;
            logger.log("User #" + userId + ": added: " + sum);
            logger.log("User #" + userId + ": current balance: " + getBalance());
            return true;
        }else{
            return false;
        }
    }

    /**
     * Withdraws a positive sum the balance can cover.
     * Fixed: the original tested `balance > sum`, which wrongly refused a
     * withdrawal of the entire balance; `balance >= sum` allows it.
     */
    public synchronized boolean removeMoney(double sum){
        if(sum > 0 && balance >= sum){
            balance -= sum;
            logger.log("User #" + userId + ": removed: " + sum);
            logger.log("User #" + userId + ": current balance: " + getBalance());
            return true;
        }else{
            return false;
        }
    }

    /** @return the current balance (synchronized read). */
    public synchronized double getBalance(){
        return balance;
    }
}
/**
 * Observable counter model (MVC): after every mutation it marks itself
 * changed and pushes the new counter value to all registered observers.
 */
public class Model extends java.util.Observable {

    // Current counter value.
    private int count;

    public Model() {
        System.out.println("Model()");
    }

    /** Sets the counter and broadcasts the new value. */
    public void setValue(int value) {
        count = value;
        System.out.println("Model init: counter = " + count);
        publish();
    }

    /** Increments the counter and broadcasts the new value. */
    public void incrementValue() {
        count++;
        System.out.println("Model : counter = " + count);
        publish();
    }

    // Common notification step shared by both mutators.
    private void publish() {
        setChanged();
        notifyObservers(count);
    }
}
* Example 2. Call Center (synchronized)
*/
import java.util.Random;
/*
Создание call-центра из 5 работников
*/
/**
 * Entry point: builds a call centre of five workers that all pull calls
 * from one shared queue.
 */
public class CallCenter {
    public static void main(String args[]) {
        // Single queue shared by every worker.
        CallQueue queue = new CallQueue();
        // Same five workers as before, started from a loop instead of
        // five copy-pasted create/start blocks.
        String[] names = {"Jack", "Barny", "Mike", "Harry", "Denny"};
        for (String name : names) {
            new Caller(name, queue).start();
        }
    }
}
/*
Звонок
*/
/*
 A single support call: a free-text problem description plus how long
 (in seconds) it takes to handle.
*/
class Call {

    private String description;  // what the customer reported
    private long duration;       // handling time in seconds

    public Call(String description, long duration) {
        this.description = description;
        this.duration = duration;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public long getDuration() {
        return duration;
    }

    public void setDuration(long duration) {
        this.duration = duration;
    }
}
/*
Очередь звонков
*/
/*
 Source of incoming calls. getCall() fabricates a call with a random
 canned description and a random duration of 1..25 seconds.
*/
class CallQueue {

    private Random random = new Random();

    // Canned problem descriptions to pick from.
    private String[] descriptions = new String[]{
            "Low speed Internet",
            "broke the Internet",
            "The computer does not work",
            "I can not go on facebook"
    };

    /** Returns the next (randomly generated) call; synchronized so that
     *  concurrent workers share the single Random safely. */
    public synchronized Call getCall() {
        String description = descriptions[random.nextInt(descriptions.length)];
        long duration = random.nextInt(25) + 1;
        return new Call(description, duration);
    }
}
/*
Класс-работник
*/
class Caller extends Thread {
private CallQueue queue;
private String name;
// Создается работник
public Caller(String name, CallQueue queue) {
this.queue = queue;
this.name = name;
}
@Override
public void run() {
// Бесконечно ожидает звонка
while (true) {
Call task = queue.getCall();
process(task);
}
}
// Отвечает
private void process(Call task) {
try {
sleep(task.getDuration() * 1000);
System.out.println(String.format("%s answered the call \"%s\" in %d seconds",
name,
task.getDescription(),
task.getDuration()));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}<file_sep>package Factory;
/**
 * Producer/consumer demo driver: ten Detail producers and one Transporter
 * consumer, all coordinating on a single shared monitor object.
 */
public class Factory {

    // Shared monitor the details and the transporter synchronize on.
    public static Object factoryNotificator;

    public static void main(String[] args) {
        factoryNotificator = new Object();
        // Launch ten detail producers...
        int id = 0;
        while (id < 10) {
            new Detail(id, factoryNotificator).start();
            id++;
        }
        // ...and a single transporter consuming them.
        new Transporter(factoryNotificator).start();
    }
}
<file_sep>// http://vk.com/dev/
// https://code.google.com/p/google-gson/
import com.google.gson.*;
import java.util.*;
/**
 * Fetches the wall of a VK community via the public REST API and prints
 * every post as "ID: ... / date> text".
 */
public class VK {
    public static void main(String[] args) {
        Get get = new Get();
        String response = get.executeGet("https://api.vk.com/method/wall.get?owner_id=-59311888");
        // Pretty-print the raw JSON one field per line.
        System.out.println(response.replaceAll(",", ",\n"));
        Gson gson = new Gson();
        JsonObject json = gson.fromJson(response, JsonObject.class);
        JsonObject post;
        for(JsonElement o:json.getAsJsonArray("response")){
            if(o.isJsonObject()){
                post = o.getAsJsonObject();
                // Fixed: VK returns "date" as a Unix timestamp in SECONDS,
                // while java.util.Date expects MILLISECONDS — the original
                // passed seconds and printed dates in January 1970.
                System.out.println("ID: " + post.get("id") + " / " + new Date(post.get("date").getAsLong() * 1000L) + "> " + post.get("text").getAsString());
            }
        }
    }
}
<file_sep>import java.io.BufferedReader;
import java.io.*;
import java.net.*;
/**
 * Minimal HTTP GET helper plus a demo main() that saves a page to disk.
 */
public class Get {

    /**
     * Performs an HTTP GET on targetURL and returns the response body with
     * lines joined by CRLF, or null on any error.
     *
     * @param targetURL absolute URL to fetch
     * @return response body, or null if the request failed
     */
    public static String executeGet(String targetURL){
        HttpURLConnection connection = null;
        BufferedReader rd = null;
        try {
            //Create connection
            URL url = new URL(targetURL);
            connection = (HttpURLConnection)url.openConnection();
            connection.setRequestMethod("GET");
            //Get Response
            InputStream is = connection.getInputStream();
            rd = new BufferedReader(new InputStreamReader(is));
            String line;
            StringBuffer response = new StringBuffer();
            while((line = rd.readLine()) != null) {
                response.append(line);
                response.append("\r\n");
            }
            return response.toString();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            // Fixed: close the reader in finally — the original only closed
            // it on the success path and leaked it when readLine() threw.
            if(rd != null) {
                try {
                    rd.close();
                } catch (IOException e) {
                    // best-effort close
                }
            }
            if(connection != null) {
                connection.disconnect();
            }
        }
    }

    public static void main(String[] args) {
        String outString = executeGet("http://www.flickr.com/search/?q=cats");
        // Fixed: the original dereferenced outString (and a possibly-null
        // stream) unconditionally, NPE-ing whenever the GET or the file
        // open failed; it also never closed the output stream.
        if (outString == null) {
            return;
        }
        FileOutputStream out = null;
        try {
            out = new FileOutputStream("D:/Test/get_out_example.txt");
            out.write(outString.getBytes());
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException e) {
                    // best-effort close
                }
            }
        }
    }
}
<file_sep>-- Скрипт сгенерирован Devart dbForge Studio for MySQL, Версия 6.0.399.0
-- Домашняя страница продукта: http://www.devart.com/ru/dbforge/mysql/studio
-- Дата скрипта: 13.10.2013 12:49:43
-- Версия сервера: 5.5.31-MariaDB-log
-- Версия клиента: 4.1
--
-- Отключение внешних ключей
--
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
--
-- Установка кодировки, с использованием которой клиент будет посылать запросы на сервер
--
SET NAMES 'utf8';
--
-- Описание для таблицы imdb
--
-- Film ratings scraped from IMDb: one row per film, rating on a 0.0-9.9
-- scale (DECIMAL(2,1)). Recreated from scratch on every import.
DROP TABLE IF EXISTS imdb;
CREATE TABLE imdb (
  name VARCHAR(255) DEFAULT NULL,   -- film title
  year INT(4) DEFAULT NULL,         -- release year
  rating DECIMAL(2, 1) DEFAULT NULL -- IMDb rating, one decimal place
)
ENGINE = INNODB
AVG_ROW_LENGTH = 65
CHARACTER SET utf8
COLLATE utf8_general_ci;
--
-- Описание для таблицы kinopoisk
--
-- Film ratings from Kinopoisk: same shape as `imdb` but with a finer
-- rating precision (DECIMAL(4,3), e.g. 9.212). Recreated on every import.
DROP TABLE IF EXISTS kinopoisk;
CREATE TABLE kinopoisk (
  name VARCHAR(255) DEFAULT NULL,   -- film title
  year INT(4) DEFAULT NULL,         -- release year
  rating DECIMAL(4, 3) DEFAULT NULL -- Kinopoisk rating, three decimals
)
ENGINE = INNODB
AVG_ROW_LENGTH = 65
CHARACTER SET utf8
COLLATE utf8_general_ci;
--
-- Вывод данных для таблицы imdb
--
INSERT INTO imdb VALUES
('The Shawshank Redemption', 1994, 9.2),
('The Godfather', 1972, 9.2),
('The Godfather: Part II', 1974, 9.0),
('Pulp Fiction', 1994, 8.9),
('The Good, the Bad and the Ugly', 1966, 8.9),
('The Dark Knight', 2008, 8.9),
('12 Angry Men', 1957, 8.9),
('Schindler''s List', 1993, 8.9),
('The Lord of the Rings: The Return of the King', 2003, 8.8),
('Fight Club', 1999, 8.8),
('Star Wars: Episode V - The Empire Strikes Back', 1980, 8.8),
('The Lord of the Rings: The Fellowship of the Ring', 2001, 8.8),
('One Flew Over the Cuckoo''s Nest', 1975, 8.7),
('Goodfellas', 1990, 8.7),
('Inception', 2010, 8.7),
('<NAME>', 1954, 8.7),
('Star Wars', 1977, 8.7),
('<NAME>', 1994, 8.7),
('The Matrix', 1999, 8.7),
('The Lord of the Rings: The Two Towers', 2002, 8.7),
('City of God', 2002, 8.6),
('The Silence of the Lambs', 1991, 8.6),
('Se7en', 1995, 8.6),
('Once Upon a Time in the West', 1968, 8.6),
('Casablanca', 1942, 8.6),
('The Usual Suspects', 1995, 8.6),
('Raiders of the Lost Ark', 1981, 8.6),
('Rear Window', 1954, 8.6),
('It''s a Wonderful Life', 1946, 8.6),
('Psycho', 1960, 8.6),
('Léon: The Professional', 1994, 8.6),
('Sunset Blvd.', 1950, 8.5),
('American History X', 1998, 8.5),
('Terminator 2: Judgment Day', 1991, 8.5),
('Apocalypse Now', 1979, 8.5),
('Memento', 2000, 8.5),
('Saving Private Ryan', 1998, 8.5),
('City Lights', 1931, 8.5),
('Gravity', 2013, 8.5),
('Dr. Strangelove or: How I Learned to Stop Worrying and Love the Bomb', 1964, 8.5),
('Alien', 1979, 8.5),
('Modern Times', 1936, 8.5),
('Spirited Away', 2001, 8.5),
('North by Northwest', 1959, 8.5),
('Back to the Future', 1985, 8.5),
('The Pianist', 2002, 8.4),
('<NAME>', 1941, 8.4),
('Life Is Beautiful', 1997, 8.4),
('M', 1931, 8.4),
('The Shining', 1980, 8.4),
('The Departed', 2006, 8.4),
('Paths of Glory', 1957, 8.4),
('Vertigo', 1958, 8.4),
('American Beauty', 1999, 8.4),
('Django Unchained', 2012, 8.4),
('Double Indemnity', 1944, 8.4),
('The Dark Knight Rises', 2012, 8.4),
('Aliens', 1986, 8.4),
('Taxi Driver', 1976, 8.4),
('The Green Mile', 1999, 8.4),
('The Intouchables', 2011, 8.4),
('Gladiator', 2000, 8.4),
('WALL·E', 2008, 8.4),
('The Lives of Others', 2006, 8.4),
('Toy Story 3', 2010, 8.4),
('The Great Dictator', 1940, 8.4),
('The Prestige', 2006, 8.4),
('A Clockwork Orange', 1971, 8.4),
('Amélie', 2001, 8.4),
('Lawrence of Arabia', 1962, 8.4),
('To Kill a Mockingbird', 1962, 8.4),
('Reservoir Dogs', 1992, 8.4),
('Das Boot', 1981, 8.4),
('<NAME>', 1988, 8.3),
('The Lion King', 1994, 8.3),
('The Treasure of the Sierra Madre', 1948, 8.3),
('The Third Man', 1949, 8.3),
('Once Upon a Time in America', 1984, 8.3),
('Requiem for a Dream', 2000, 8.3),
('Star Wars: Episode VI - Return of the Jedi', 1983, 8.3),
('Eternal Sunshine of the Spotless Mind', 2004, 8.3),
('Full Metal Jacket', 1987, 8.3),
('Braveheart', 1995, 8.3),
('L.A. Confidential', 1997, 8.3),
('Oldboy', 2003, 8.3),
('Singin'' in the Rain', 1952, 8.3),
('Metropolis', 1927, 8.3),
('Chinatown', 1974, 8.3),
('Some Like It Hot', 1959, 8.3),
('Rashomon', 1950, 8.3),
('<NAME>', 1948, 8.3),
('All About Eve', 1950, 8.3),
('<NAME> and the Holy Grail', 1975, 8.3),
('<NAME>', 1997, 8.3),
('Amadeus', 1984, 8.3),
('2001: A Space Odyssey', 1968, 8.3),
('Witness for the Prosecution', 1957, 8.3),
('The Sting', 1973, 8.3),
('The Apartment', 1960, 8.3),
('Unforgiven', 1992, 8.3),
('Grave of the Fireflies', 1988, 8.3),
('Indiana Jones and the Last Crusade', 1989, 8.3),
('Raging Bull', 1980, 8.3),
('The Bridge on the River Kwai', 1957, 8.3),
('Die Hard', 1988, 8.3),
('Yojimbo', 1961, 8.3),
('<NAME>', 2005, 8.3),
('A Separation', 2011, 8.3),
('Inglourious Basterds', 2009, 8.2),
('For a Few Dollars More', 1965, 8.2),
('Snatch.', 2000, 8.2),
('Mr. <NAME> to Washington', 1939, 8.2),
('Toy Story', 1995, 8.2),
('On the Waterfront', 1954, 8.2),
('The Great Escape', 1963, 8.2),
('Downfall', 2004, 8.2),
('Pan''s Labyrinth', 2006, 8.2),
('Up', 2009, 8.2),
('The General', 1926, 8.2),
('The Seventh Seal', 1957, 8.2),
('Heat', 1995, 8.2),
('The Elephant Man', 1980, 8.2),
('The Maltese Falcon', 1941, 8.2),
('Blade Runner', 1982, 8.2),
('The Kid', 1921, 8.2),
('Wild Strawberries', 1957, 8.2),
('Rebecca', 1940, 8.2),
('Scarface', 1983, 8.2),
('Ikiru', 1952, 8.2),
('Fargo', 1996, 8.2),
('Ran', 1985, 8.2),
('<NAME>', 2008, 8.2),
('The Big Lebowski', 1998, 8.2),
('Touch of Evil', 1958, 8.2),
('The Gold Rush', 1925, 8.2),
('The Deer Hunter', 1978, 8.2),
('Cool Hand Luke', 1967, 8.2),
('It Happened One Night', 1934, 8.1),
('Diabolique', 1955, 8.1),
('Lock, Stock and Two Smoking Barrels', 1998, 8.1),
('No Country for Old Men', 2007, 8.1),
('The Sixth Sense', 1999, 8.1),
('Good Will Hunting', 1997, 8.1),
('Jaws', 1975, 8.1),
('Casino', 1995, 8.1),
('Judgment at Nuremberg', 1961, 8.1),
('Strangers on a Train', 1951, 8.1),
('The Wizard of Oz', 1939, 8.1),
('Platoon', 1986, 8.1),
('<NAME> and the Sundance Kid', 1969, 8.1),
('The Grapes of Wrath', 1940, 8.1),
('Sin City', 2005, 8.1),
('Kill Bill: Vol. 1', 2003, 8.1),
('The Hunt', 2012, 8.1),
('Trainspotting', 1996, 8.1),
('The Thing', 1982, 8.1),
('Gone with the Wind', 1939, 8.1),
('<NAME>', 1977, 8.1),
('Hotel Rwanda', 2004, 8.1),
('High Noon', 1952, 8.1),
('Warrior', 2011, 8.1),
('The Secret in Their Eyes', 2009, 8.1),
('Finding Nemo', 2003, 8.1),
('My Neighbor Totoro', 1988, 8.1),
('V for Vendetta', 2005, 8.1),
('The Avengers', 2012, 8.1),
('Dial M for Murder', 1954, 8.1),
('Notorious', 1946, 8.1),
('How to Train Your Dragon', 2010, 8.1),
('Life of Brian', 1979, 8.1),
('Into the Wild', 2007, 8.1),
('The Best Years of Our Lives', 1946, 8.1),
('Network', 1976, 8.1),
('The Terminator', 1984, 8.1),
('Million Dollar Baby', 2004, 8.1),
('There Will Be Blood', 2007, 8.1),
('Ben-Hur', 1959, 8.1),
('The Night of the Hunter', 1955, 8.1),
('The King''s Speech', 2010, 8.1),
('Stand by Me', 1986, 8.1),
('The Big Sleep', 1946, 8.1),
('Twelve Monkeys', 1995, 8.1),
('The 400 Blows', 1959, 8.1),
('Groundhog Day', 1993, 8.1),
('<NAME>', 2001, 8.0),
('Dog Day Afternoon', 1975, 8.0),
('<NAME>', 2000, 8.0),
('Howl''s Moving Castle', 2004, 8.0),
('Gandhi', 1982, 8.0),
('<NAME>', 2009, 8.0),
('Rush', 2013, 8.0),
('The Bourne Ultimatum', 2007, 8.0),
('A Beautiful Mind', 2001, 8.0),
('The Killing', 1956, 8.0),
('Persona', 1966, 8.0),
('The Graduate', 1967, 8.0),
('<NAME>', 2010, 8.0),
('<NAME>', 1987, 8.0),
('<NAME>', 1961, 8.0),
('Who''s Afraid of <NAME>?', 1966, 8.0),
('The Man Who Shot Liberty Valance', 1962, 8.0),
('<NAME>', 1954, 8.0),
('Anatomy of a Murder', 1959, 8.0),
('The Manchurian Candidate', 1962, 8.0),
('Rocky', 1976, 8.0),
('8½', 1963, 8.0),
('The Exorcist', 1973, 8.0),
('Sl<NAME>', 2008, 8.0),
('In the Name of the Father', 1993, 8.0),
('Rope', 1948, 8.0),
('<NAME>', 1953, 8.0),
('<NAME> Alexander', 1982, 8.0),
('Monsters, Inc.', 2001, 8.0),
('<NAME>', 1975, 8.0),
('The Wild Bunch', 1969, 8.0),
('Infernal Affairs', 2002, 8.0),
('The Truman Show', 1998, 8.0),
('Life of Pi', 2012, 8.0),
('Roman Holiday', 1953, 8.0),
('Pirates of the Caribbean: The Curse of the Black Pearl', 2003, 8.0),
('Memories of Murder', 2003, 8.0),
('All Quiet on the Western Front', 1930, 8.0),
('<NAME> and the Deathly Hallows: Part 2', 2011, 8.0),
('Sleuth', 1972, 8.0),
('Stalker', 1979, 8.0),
('<NAME>', 1993, 8.0),
('A Streetcar Named Desire', 1951, 8.0),
('Star Trek', 2009, 8.0),
('Ratatouille', 2007, 8.0),
('Ip Man', 2008, 8.0),
('A Fistful of Dollars', 1964, 8.0),
('The Diving Bell and the Butterfly', 2007, 8.0),
('The Hobbit: An Unexpected Journey', 2012, 8.0),
('District 9', 2009, 8.0),
('Shutter Island', 2010, 8.0),
('Rain Man', 1988, 8.0),
('Incendies', 2010, 8.0),
('<NAME>', 1995, 8.0),
('Rosemary''s Baby', 1968, 8.0),
('3 Idiots', 2009, 8.0),
('Nausicaä of the Valley of the Wind', 1984, 8.0),
('The Artist', 2011, 8.1),
('Beauty and the Beast', 1991, 7.9),
('Bringing Up Baby', 1938, 7.9),
('Before Sunrise', 1995, 7.9),
('Three Colors: Red', 1994, 7.9),
('Mystic River', 2003, 7.9),
('Papillon', 1973, 7.9),
('In the Heat of the Night', 1967, 7.9),
('Arsenic and Old Lace', 1944, 7.9);
--
-- Вывод данных для таблицы kinopoisk
--
INSERT INTO kinopoisk VALUES
('The Shawshank Redemption', 1994, 9.212),
('The Green Mile', 1999, 9.161),
('<NAME>', 1994, 9.025),
('Intouchables', 2011, 8.910),
('Schindler''s List', 1993, 8.882),
('Léon: The Professional', 1994, 8.794),
('The Lion King', 1994, 8.789),
('Inception', 2010, 8.784),
('Fight Club', 1999, 8.740),
('La Vita è bella', 1997, 8.705),
('<NAME> меняет профессию', 1973, 8.696),
('Knockin'' on Heaven''s Door', 1997, 8.687),
('The Godfather', 1972, 8.675),
('The Prestige', 2006, 8.657),
('A Beautiful Mind', 2001, 8.647),
('Pulp Fiction', 1994, 8.644),
('Операция «Ы» и другие приключения Шурика', 1965, 8.623),
('Gladiator', 2000, 8.603),
('The Lord of the Rings: The Return of the King', 2003, 8.601),
('Lock, Stock and Two Smoking Barrels', 1998, 8.592),
('Back to the Future', 1985, 8.571),
('The Pianist', 2002, 8.559),
('Catch Me If You Can', 2002, 8.545),
('The Departed', 2006, 8.544),
('Snatch.', 2000, 8.538),
('Бриллиантовая рука', 1968, 8.526),
('One Flew Over the Cuckoo''s Nest', 1975, 8.526),
('В бой идут одни «старики»', 1973, 8.526),
('American History X', 1998, 8.522),
('The Lord of the Rings: The Fellowship of the Ring', 2001, 8.520),
('The Matrix', 1999, 8.512),
('The Lord of the Rings: The Two Towers', 2002, 8.511),
('WALL·E', 2008, 8.503),
('Scent of a Woman', 1992, 8.502),
('Pirates of the Caribbean: The Curse of the Black Pearl', 2003, 8.501),
('The Dark Knight', 2008, 8.499),
('Shutter Island', 2009, 8.498),
('Django Unchained', 2012, 8.497),
('Awakenings', 1990, 8.495),
('City Lights', 1931, 8.493),
('Hachi: A Dog''s Tale', 2008, 8.491),
('Джентльмены удачи', 1971, 8.490),
('Les Choristes', 2004, 8.490),
('Il Buono, il brutto, il cattivo', 1966, 8.485),
('Some Like It Hot', 1959, 8.475),
('It''s a Wonderful Life', 1946, 8.474),
('<NAME> и <NAME>: <NAME>', 1981, 8.470),
('12 Angry Men', 1957, 8.469),
('Sen to Chihiro no kamikakushi', 2001, 8.463),
('The Godfather: Part II', 1974, 8.461),
('Titanic', 1997, 8.459),
('Se7en', 1995, 8.449),
('The Truman Show', 1998, 8.440),
('The Silence of the Lambs', 1990, 8.440),
('Cinderella Man', 2005, 8.438),
('...А зори здесь тихие', 1972, 8.436),
('How to Train Your Dragon', 2010, 8.435),
('Кавказская пленница, или Новые приключения Шурика', 1966, 8.428),
('The Game', 1997, 8.428),
('<NAME> и <NAME>: Знакомство', 1979, 8.427),
('Braveheart', 1995, 8.410),
('Собачье сердце', 1988, 8.404),
('The Help', 2011, 8.404),
('Офицеры', 1971, 8.397),
('Gravity', 2013, 8.396),
('The Pursuit of Happyness', 2006, 8.391),
('The Kid', 1921, 8.386),
('Terminator 2: Judgment Day', 1991, 8.381),
('Rain Man', 1988, 8.370),
('The Dark Knight Rises', 2012, 8.368),
('<NAME>', 2008, 8.362),
('Cast Away', 2000, 8.357),
('Balto', 1995, 8.357),
('<NAME>', 1959, 8.356),
('Once Upon a Time in America', 1983, 8.355),
('<NAME>', 1957, 8.348),
('The Sixth Sense', 1999, 8.345),
('Roman Holiday', 1953, 8.343),
('The Notebook', 2004, 8.341),
('The Hobbit: An Unexpected Journey', 2012, 8.340),
('<NAME>', 1976, 8.333),
('Saving Private Ryan', 1998, 8.332),
('Il bisbetico domato', 1980, 8.330),
('Amadeus', 1984, 8.329),
('Москва слезам не верит', 1979, 8.328),
('Легенда №17', 2012, 8.326),
('Gone with the Wind', 1939, 8.320),
('Beauty and the Beast', 1991, 8.318),
('Das Leben der Anderen', 2006, 8.317),
('Hauru no ugoku shiro', 2004, 8.316),
('The Butterfly Effect', 2004, 8.315),
('Back to the Future Part II', 1989, 8.313),
('<NAME> и доктор Ватсон: Смертельная схватка', 1980, 8.311),
('Они сражались за Родину', 1975, 8.308),
('Inside I''m Dancing', 2004, 8.306),
('Lucky Number Slevin', 2005, 8.303),
('Modern Times', 1936, 8.297),
('The Artist', 2011, 8.295),
('The Boy in the Striped Pyjamas', 2008, 8.290),
('<NAME> и доктор Ватсон: Кровавая надпись', 1979, 8.279),
('Hotaru no haka', 1988, 8.279),
('La leggenda del pianista sull''oceano', 1998, 8.278),
('How to Steal a Million', 1966, 8.278),
('My Name Is Khan', 2010, 8.275),
('The Devil''s Advocate', 1997, 8.274),
('Casino', 1995, 8.274),
('Баллада о солдате', 1959, 8.272),
('Dead Poets Society', 1989, 8.270),
('<NAME> и доктор Ватсон: Охота на тигра', 1980, 8.264),
('Служебный роман', 1977, 8.260),
('The Blind Side', 2009, 8.260),
('Ray', 2004, 8.256),
('Ocean''s Eleven', 2001, 8.253),
('Seven Pounds', 2008, 8.253),
('<NAME> и доктор Ватсон: Король шантажа', 1980, 8.245),
('The Life of David Gale', 2003, 8.243),
('The World''s Fastest Indian', 2005, 8.241),
('The Sting', 1973, 8.238),
('Million Dollar Baby', 2004, 8.236),
('Добро пожаловать, или Посторонним вход воспрещен', 1964, 8.236),
('Per qualche dollaro in più', 1965, 8.235),
('Cidade de Deus', 2002, 8.231),
('Девчата', 1961, 8.230),
('Goodfellas', 1990, 8.230),
('Брат', 1997, 8.227),
('Warrior', 2011, 8.224),
('Scarface', 1983, 8.221),
('Bluff storia di truffe e di imbroglioni', 1976, 8.221),
('Dances with Wolves', 1990, 8.220),
('<NAME> и доктор Ватсон: Сок<NAME>', 1983, 8.218),
('<NAME>', 1968, 8.218),
('<NAME>', 1984, 8.216),
('<NAME>', 1966, 8.214),
('Pay It Forward', 2000, 8.212),
('<NAME>', 1954, 8.212),
('Dial M for Murder', 1954, 8.209),
('Отец солдата', 1964, 8.209),
('Ghost', 1990, 8.207),
('Gandhi', 1982, 8.206),
('<NAME>', 2005, 8.202),
('Home Alone', 1990, 8.200),
('Groundhog Day', 1993, 8.199),
('<NAME>', 2009, 8.198),
('Вам и не снилось...', 1980, 8.197),
('The Terminal', 2004, 8.196),
('Eight Below', 2006, 8.196),
('October Sky', 1999, 8.196),
('Good Will Hunting', 1997, 8.194),
('<NAME>', 1988, 8.193),
('The Hurricane', 1999, 8.193),
('Aladdin', 1992, 8.190),
('Остров сокровищ', 1988, 8.190),
('Reservoir Dogs', 1991, 8.189),
('Pirates of the Caribbean: Dead Man''s Chest', 2006, 8.188),
('Witness for the Prosecution', 1957, 8.187),
('The Elephant Man', 1980, 8.186),
('The Fifth Element', 1997, 8.185),
('Back to the Future Part III', 1990, 8.184),
('12 стульев', 1971, 8.183),
('The King''s Speech', 2010, 8.182),
('L''ours', 1988, 8.179),
('K-PAX', 2001, 8.176),
('Freedom Writers', 2007, 8.176),
('Золот<NAME>еленок', 1968, 8.172),
('Тот самый Мюнхгаузен', 1979, 8.172),
('The Circus', 1928, 8.171),
('Temp<NAME>', 2010, 8.169),
('Apocalypse Now', 1979, 8.168),
('Eternal Sunshine of the Spotless Mind', 2004, 8.163),
('The Gold Rush', 1925, 8.162),
('Star Wars: Episode III - Revenge of the Sith', 2005, 8.161),
('Yip Man', 2008, 8.161),
('<NAME>', 2007, 8.160),
('Ice Age', 2002, 8.159),
('Le Fabuleux destin d''<NAME>', 2001, 8.159),
('Star Wars: Episode VI - Return of the Jedi', 1983, 8.159),
('Hooligans', 2004, 8.158),
('Singin'' in the Rain', 1952, 8.158),
('The Fox and the Hound', 1981, 8.157),
('Heat', 1995, 8.156),
('Men of Honor', 2000, 8.153),
('In<NAME> and the Last Crusade', 1989, 8.148),
('The Bucket List', 2007, 8.147),
('I Am Sam', 2001, 8.144),
('Mononoke-hime', 1997, 8.143),
('Monsters, Inc.', 2001, 8.141),
('<NAME>', 2006, 8.141),
('3 Idiots', 2009, 8.141),
('The Illusionist', 2006, 8.140),
('Le professionnel', 1981, 8.140),
('Иваново детство', 1962, 8.140),
('The Great Gatsby', 2013, 8.139),
('Nuovo Cinema Paradiso', 1988, 8.136),
('Покровские ворота', 1982, 8.135),
('Psycho', 1960, 8.134),
('<NAME>', 2000, 8.134),
('Crash', 2004, 8.133),
('Pink Floyd The Wall', 1982, 8.133),
('Завтра была война', 1987, 8.131),
('A Perfect World', 1993, 8.130),
('American Beauty', 1999, 8.129),
('Le notti di Cabiria', 1957, 8.129),
('Det sjunde inseglet', 1957, 8.128),
('Into the Wild', 2007, 8.127),
('To Kill a Mockingbird', 1962, 8.127),
('Rear Window', 1954, 8.125),
('Chaplin', 1992, 8.125),
('Life of Pi', 2012, 8.124),
('<NAME>', 1987, 8.120),
('Znachor', 1981, 8.119),
('The Great Dictator', 1940, 8.117),
('Bom yeoreum gaeul gyeoul geurigo bom', 2003, 8.116),
('Холодное лето пятьдесят третьего', 1987, 8.116),
('Les Misérables', 2012, 8.112),
('Жестокий романс', 1984, 8.112),
('Star Wars: Episode V - The Empire Strikes Back', 1980, 8.111),
('The Curious Case of Benjamin Button', 2008, 8.110),
('Meet <NAME>', 1998, 8.109),
('<NAME>', 2003, 8.109),
('Gattaca', 1997, 8.108),
('L.A. Confidential', 1997, 8.108),
('<NAME>', 1975, 8.108),
('The Last Samurai', 2003, 8.105),
('Гамлет', 1964, 8.105),
('Anastasia', 1997, 8.103),
('The Breakfast Club', 1985, 8.101),
('<NAME> and the Deathly Hallows: Part 2', 2011, 8.100),
('<NAME> and the Sorcerer''s Stone', 2001, 8.100),
('<NAME> and the Prisoner of Azkaban', 2004, 8.100),
('Changeling', 2008, 8.100),
('Dogville', 2003, 8.100),
('Освобождение: Битва за Берлин', 1971, 8.099),
('Освобождение: Последний штурм', 1971, 8.099),
('<NAME>', 1955, 8.098),
('Обыкновенное чудо', 1978, 8.098),
('<NAME>', 2009, 8.098),
('Interstate 60', 2001, 8.097),
('<NAME>томобиля', 1966, 8.097),
('La môme', 2007, 8.097),
('Доживем до понедельника', 1968, 8.097),
('Однажды двадцать лет спустя', 1980, 8.097),
('Смех и горе у Бела моря', 1988, 8.097),
('Mulan', 1998, 8.096),
('In<NAME>', 1981, 8.096),
('Мужики!..', 1981, 8.096),
('The Iron Giant', 1999, 8.095),
('The Mighty', 1998, 8.095),
('Аты-баты, шли солдаты...', 1976, 8.095),
('Shrek', 2001, 8.094),
('Брат 2', 2000, 8.094);
--
-- Включение внешних ключей
--
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;<file_sep>import java.util.*;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/** Days of the week; branched on by Main.enumDay() below. */
enum Day {
    SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY
}
/*
 * Enum with a per-constant field and constant-specific method bodies:
 * every constant supplies its own opposite().
 */
enum Direction {
    UP (38) {
        public Direction opposite() { return DOWN; }
    },
    DOWN (40) {
        public Direction opposite() { return UP; }
    },
    RIGHT (39) {
        public Direction opposite() { return LEFT; }
    },
    LEFT (37) {
        public Direction opposite() { return RIGHT; }
    };
    // Key code bound to this direction (37-40 look like the standard
    // arrow-key codes — presumably keyboard KeyEvent codes; confirm
    // against whatever UI consumes this field).
    int keyboardNumber;
    Direction(int keyboardNumber) { this.keyboardNumber = keyboardNumber; }
    /** @return the direction 180 degrees from this one. */
    public abstract Direction opposite();
}
/**
 * Small demos: enums, string tokenizing, and MD5 hashing.
 */
public class Main {
    /*
     Simple enum example: branch on an enum constant.
    */
    static void enumDay(Day currentDay){
        if(currentDay == Day.SUNDAY){
            System.out.println("I don't like Sunday");
        } else if(currentDay == Day.MONDAY){
            System.out.println("I like it!");
        } else{
            System.out.println("OK!");
        }
    }

    /*
     Enum example with a field and constant-specific methods.
    */
    static void enumDirection(Direction dir){
        System.out.println("Current direction is " + dir);
        System.out.println("Key number is " + dir.keyboardNumber);
        System.out.println("Opposite direction is " + dir.opposite());
    }

    /*
     StringTokenizer and String.split: print the tokens of sourceString
     twice, once with each mechanism.
     Note: split() treats the delimiter as a REGEX, while StringTokenizer
     treats every character of it as a separate delimiter, so the outputs
     can differ for multi-character delimiters.
    */
    static void tokenizing (String sourceString, String delimiter){
        // StringTokenizer
        StringTokenizer st = new StringTokenizer(sourceString, delimiter);
        while (st.hasMoreTokens()) {
            System.out.println(st.nextToken());
        }
        // Split method of String Class
        String[] result = sourceString.split(delimiter);
        for (int x=0; x<result.length; x++){
            System.out.println(result[x]);
        }
    }

    /**
     * Returns the MD5 digest of inputString as a 32-character lower-case
     * hex string.
     *
     * @param inputString text to hash (encoded as UTF-8)
     * @return 32-char hex MD5 digest
     */
    static String getMD5(String inputString) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5"); // DSA, RSA, MD5, SHA-1, SHA-256
            // Fixed: hash explicit UTF-8 bytes. The no-arg getBytes() used
            // the platform default charset, so non-ASCII input produced
            // different digests on different machines.
            byte[] messageDigest = md.digest(inputString.getBytes("UTF-8"));
            BigInteger number = new BigInteger(1, messageDigest);
            String hashtext = number.toString(16);
            // Left-pad to 32 digits; BigInteger.toString drops leading zeros.
            while (hashtext.length() < 32) {
                hashtext = "0" + hashtext;
            }
            return hashtext;
        }
        catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        catch (java.io.UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java spec; cannot happen.
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        enumDay(Day.MONDAY);
        enumDirection(Direction.LEFT);
        tokenizing("My test string.", "t");
        System.out.println(getMD5("ITIS"));
    }
}
<file_sep>import java.awt.*;
import javax.swing.*;
/**
 * Demo frame showing the five BorderLayout regions, each filled with a
 * bordered, centred label.
 */
public class ComponentOrientationTest extends JFrame {

    // Window size in pixels.
    private final static int WIDTH = 410;
    private final static int HEIGHT = 220;

    /** Builds the frame with one label per BorderLayout slot. */
    public ComponentOrientationTest() {
        super("Component orientation test");
        JPanel content = new JPanel(new BorderLayout(5, 5));
        // Caption/slot pairs, added in the same order as before.
        String[] captions = {"Top", "Bottom", "Left", "Right", "Center"};
        String[] slots = {BorderLayout.NORTH, BorderLayout.SOUTH,
                BorderLayout.WEST, BorderLayout.EAST, BorderLayout.CENTER};
        for (int i = 0; i < captions.length; i++) {
            content.add(createLabel(captions[i]), slots[i]);
        }
        setContentPane(content);
        setSize(WIDTH, HEIGHT);
        setDefaultCloseOperation(EXIT_ON_CLOSE);
    }

    /** Creates a centred 100x50 label with a 3px orange line border. */
    private JLabel createLabel(String caption) {
        JLabel label = new JLabel(caption);
        label.setPreferredSize(new Dimension(100, 50));
        label.setHorizontalAlignment(JLabel.CENTER);
        label.setBorder(BorderFactory.createLineBorder(new Color(0xff8000), 3));
        return label;
    }
}
/**
 * Demonstrates java.util.concurrent.DelayQueue: elements become visible
 * to poll() only after their individual delay has expired.
 */
public class DelayedQueueExample {
    public static void main(String[] args) {
        int n = 4;  // number of delayed tasks to enqueue
        DelayQueue<DelayedThread> queue=new DelayQueue<DelayedThread>();
        for (int i = 0; i < n; i++){
            queue.add(new DelayedThread());
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        // Drain the queue. NOTE(review): poll() returns null while the
        // head's delay has not expired, so this loop spins and may start
        // Threads with a null Runnable (which simply do nothing) —
        // presumably deliberate for the demo; take() would block instead.
        while(!queue.isEmpty()){
            new Thread(queue.poll()).start();
        }
    }
}
/*
 * Runnable with a Delayed contract: instance #i becomes ready roughly
 * i*100 seconds after the epoch of System.nanoTime().
 */
class DelayedThread implements Runnable,Delayed {

    // Instance counter; each new object takes the next id.
    static public int count=0;

    private TimeUnit time=TimeUnit.NANOSECONDS;
    private int id=count++;

    public void run() {
        System.out.println("Thread #" + id + " started");
    }

    /** Remaining delay: target instant id*100s (in ns) minus "now". */
    public long getDelay(TimeUnit unit){
        long target = id * 100000000000L;
        return unit.convert(target - System.nanoTime(), time);
    }

    /** Orders elements by remaining delay, as DelayQueue requires. */
    public int compareTo(Delayed o) {
        long mine = getDelay(time);
        long theirs = o.getDelay(time);
        if (mine < theirs) {
            return -1;
        }
        return (mine > theirs) ? 1 : 0;
    }
}
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * JDBC money-transfer demo contrasting a proper transaction (sendMoney)
 * with a naive autocommit version (simpleSendMoney).
 */
public class Transaction {

    static Connection connection = null;

    /**
     * Atomically transfers `sum` from account `from` to account `to`.
     * Both accounts must exist and the sender must cover the amount.
     *
     * Fixed: returns true when the transfer was applied and committed —
     * the original always returned false, so callers could never tell
     * success from failure.
     *
     * @return true if the transfer was applied and committed
     */
    static boolean sendMoney(String from, String to, int sum){
        boolean transferred = false;
        try{
            connection.setAutoCommit(false);  // everything below is one transaction
            PreparedStatement preparedStatement = connection.prepareStatement(
                    "SELECT * FROM `money` where `name` IN(?,?) GROUP BY `name`");
            preparedStatement.setString(1, from);
            preparedStatement.setString(2, to);
            ResultSet result = preparedStatement.executeQuery();
            // Require both accounts to exist and the sender to cover the sum.
            int acceptFlag = 0;
            while (result.next()) {
                if((result.getString("name").equals(from) && result.getFloat("balance")>=sum) ||
                        result.getString("name").equals(to)){
                    acceptFlag++;
                }
            }
            if(acceptFlag==2){
                preparedStatement = connection.prepareStatement(
                        "UPDATE `money` SET `balance`=`balance`+? WHERE name=?");
                preparedStatement.setInt(1, sum);
                preparedStatement.setString(2, to);
                preparedStatement.execute();
                preparedStatement = connection.prepareStatement(
                        "UPDATE `money` SET `balance`=`balance`-? WHERE name=?");
                preparedStatement.setInt(1, sum);
                preparedStatement.setString(2, from);
                preparedStatement.execute();
                transferred = true;
            }
            connection.commit();
        } catch (Exception ex) {
            Logger.getLogger(Transaction.class.getName()).log(Level.SEVERE, null, ex);
            transferred = false;
            try{
                connection.rollback();  // undo any partial updates
            } catch (Exception e) {
                // rollback failed; nothing more we can do here
            }
        }
        return transferred;
    }

    /**
     * Non-transactional counterpart of sendMoney: two independent
     * autocommit UPDATEs, kept as a contrast example.
     *
     * WARNING: the SQL is built by string concatenation — vulnerable to
     * SQL injection if from/to ever come from untrusted input. Use the
     * parameterized statements of sendMoney() instead.
     *
     * Fixed: returns true once both UPDATEs ran (originally always false).
     */
    static boolean simpleSendMoney(String from, String to, int sum){
        try{
            Statement statement = connection.createStatement();
            String query = "UPDATE `money` SET `balance`=`balance`+" + sum + " WHERE name='"+to+"'";
            System.out.println(query);
            statement.executeUpdate(query);
            query = "UPDATE `money` SET `balance`=`balance`-" + sum + " WHERE name='"+from+"'";
            System.out.println(query);
            statement.executeUpdate(query);
            return true;
        } catch (Exception ex) {
            Logger.getLogger(Transaction.class.getName()).log(Level.SEVERE, null, ex);
            try{
                // Note: a no-op under autocommit — each UPDATE above was
                // already committed individually.
                connection.rollback();
            } catch (Exception e) {
            }
        }
        return false;
    }

    public static void main(String[] args) {
        String url = "jdbc:mysql://localhost:3306/itis";
        String name = "root";
        String password = "";
        try{
            Class.forName("com.mysql.jdbc.Driver");
            connection = DriverManager.getConnection(url, name, password);
            simpleSendMoney("Alice", "Bob", 1000);
            //sendMoney("Alice", "Bob", 1);
        } catch (Exception ex) {
            Logger.getLogger(Transaction.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // Always release the connection.
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException ex) {
                    Logger.getLogger(Transaction.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
}
<file_sep>import java.util.*;
public class TimerTaskExample {
public static void main(String[] args) {
Timer timer = new Timer();
final CalculatePrimes calculator = new CalculatePrimes();
calculator.start();
TimerTask tt = new TimerTask() {
public void run()
{
calculator.finished = true;
}
};
timer.schedule(tt, 10000);
}
}
<file_sep>CREATE TABLE money (
id INT(11) DEFAULT NULL,
name VARCHAR(50) DEFAULT NULL,
balance FLOAT DEFAULT NULL
)
INSERT INTO money VALUES
(1, 'Alice', 1500),
(2, 'Bob', 0),
(3, 'Charlie', 10000);<file_sep>CREATE TABLE imdb (
name VARCHAR(255) NOT NULL,
year INT(4) DEFAULT NULL,
rating DECIMAL(2, 1) DEFAULT NULL,
PRIMARY KEY (name)
)
ENGINE = INNODB
AVG_ROW_LENGTH = 65
CHARACTER SET utf8
COLLATE utf8_general_ci;
CREATE TABLE kinopoisk (
name VARCHAR(255) DEFAULT NULL,
year INT(4) DEFAULT NULL,
rating DECIMAL(4, 3) DEFAULT NULL
)
ENGINE = INNODB
AVG_ROW_LENGTH = 65
CHARACTER SET utf8
COLLATE utf8_general_ci;
CREATE TABLE money (
id INT(11) DEFAULT NULL,
name VARCHAR(50) DEFAULT NULL,
balance FLOAT DEFAULT NULL
)
ENGINE = INNODB
AVG_ROW_LENGTH = 5461
CHARACTER SET utf8
COLLATE utf8_general_ci;<file_sep>package Chat;
import java.io.*;
import java.net.*;
public class ChatClient
{
public static final String SERVER_HOSTNAME = "localhost";
public static final int SERVER_PORT = 2002;
public static void main(String[] args)
{
BufferedReader in = null;
PrintWriter out = null;
try {
Socket socket = new Socket(SERVER_HOSTNAME, SERVER_PORT);
in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
out = new PrintWriter(
new OutputStreamWriter(socket.getOutputStream()));
System.out.println("Connected to server " +
SERVER_HOSTNAME + ":" + SERVER_PORT);
} catch (IOException ioe) {
System.err.println("Can not establish connection to " +
SERVER_HOSTNAME + ":" + SERVER_PORT);
ioe.printStackTrace();
System.exit(-1);
}
// Create and start Sender thread
Sender sender = new Sender(out);
sender.setDaemon(true);
sender.start();
try {
// Read messages from the server and print them
String message;
while ((message=in.readLine()) != null) {
System.out.println(message);
}
} catch (IOException ioe) {
System.err.println("Connection to server broken.");
ioe.printStackTrace();
}
}
}
class Sender extends Thread
{
private PrintWriter mOut;
public Sender(PrintWriter aOut)
{
mOut = aOut;
}
/**
* Until interrupted reads messages from the standard input (keyboard)
* and sends them to the chat server through the socket.
*/
public void run()
{
try {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
while (!isInterrupted()) {
String message = in.readLine();
mOut.println(message);
mOut.flush();
}
} catch (IOException ioe) {
// Communication is broken
}
}
} | d76ff69dd6f41c93637f81d56b0d57dafd1a2132 | [
"Markdown",
"Java",
"SQL"
] | 27 | Java | mubinov/itis | e2e52298431117100527f030cc1a3d1b5c5c735c | b5fe6875515f7920e8dc2e65c446932161a7dee8 |
refs/heads/master | <repo_name>Roman-Suprun/My_.NET_Portpholio<file_sep>/MyPortpholio/Scripts/javascript.js
function LoadAnimation() {
var c=0;
var el = document.getElementById('percent');
function count() {
c++;
el.innerHTML = c +"%";
if(c==100)
{
clearInterval(resresh);
panel.className += 'PanelVisbility';
logo.className+= 'logoVisible';
percent.className+= 'percentHidden';
console.log(logo.className);
}
}
var random = Math.random() * (70-10) + 10;
var resresh = setInterval(count, random);
}
function slider() {
$('.slick-slider').slick();
}
<file_sep>/MyPortpholio/Controllers/EmailController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Mail;
using System.Web;
using System.Web.Helpers;
using System.Web.Mvc;
using MyPortpholio.Models;
namespace MyPortpholio.Controllers
{
public class EmailController : Controller
{
// GET: Email
public ActionResult EmailSuccess()
{
return View();
}
[HttpPost]
public ActionResult Index(Emailmodel email_obj)
{
try
{
MailMessage mail = new MailMessage();
mail.To.Add("<EMAIL>");
mail.From = new MailAddress("<EMAIL>");
mail.Subject = email_obj.EmailSubject;
string Body = email_obj.Message;
mail.Body = Body;
mail.IsBodyHtml = true;
SmtpClient smtp = new SmtpClient();
smtp.Host = "smtp.gmail.com";
smtp.Port = 587;
smtp.DeliveryMethod = SmtpDeliveryMethod.Network;
smtp.UseDefaultCredentials = false;
smtp.Credentials = new System.Net.NetworkCredential("<EMAIL>", "009100165641rrRRR");// Enter senders User name and password
smtp.EnableSsl = true;
smtp.Send(mail);
return Redirect("/Email/EmailSuccess");
}
catch (Exception)
{
ViewBag.Status = "Problem while sending email, Please check details.";
}
return null;
}
}
}
<file_sep>/MyPortpholio/Scripts/DetailJob.js
function showDetail(namechildid) {
document.getElementById('showdetail').className ='showdetailNew';
$( "#showdetail" ).show( "fade");
var name = document.getElementById(namechildid).parentNode.id;
// console.log(document.getElementById(namechildid).parentNode.childNodes('p'));
var i = document.getElementById(name).style.backgroundImage;
document.getElementById('showdetail-content').style.backgroundImage = i;
document.getElementById('showdetail-content').style.backgroundSize = "contain";
document.getElementById('showdetail-content').style.backgroundRepeat = "no-repeat";
document.getElementById('showdetail-content').style.backgroundPosition = "center";
var description = document.getElementById('description-in-showdetail-container');
var descriptionh1 = document.getElementById('description-in-showdetail-container-h1');
switch (namechildid) {
case "1":
descriptionh1.innerHTML = "Site-portfolio";
description.innerHTML = " This site is for me, my development and also for people who want to learn something new for themselves.";
break;
case "2":
descriptionh1.innerHTML = "Site-parsing";
description.innerHTML = "Parsing site of Formula 1. Parsing race results.";
break;
case "3":
descriptionh1.innerHTML = "R.H.I.V.W.U.N.N";
description.innerHTML = "Software implementation of information technology is created in C # in the development environment Visual Studio 2015. The program provides a pleasant and intuitive interface and contains all the necessary documentation and instructions for proper use. To study the neural network used a representative sample of the cost of various apartments in different parts of the city of Vinnytsia, taken from the site dom.ria.ua";
break;
case "4":
descriptionh1.innerHTML = "Spichki";
description.innerHTML = "Board game. A game that will help you distract from work and routine. In this game two modes are implemented, named Two Players and PC Game. You can see your results in the statistic of the game. There is a training section which helps you to use the game correctly.";
break;
case "5":
descriptionh1.innerHTML = "Style-picker";
description.innerHTML = "This site will help to determine the interier of your own house.";
break;
case "6":
descriptionh1.innerHTML = "Integration";
description.innerHTML = "This site was created for the customer.";
break;
case "7":
descriptionh1.innerHTML = "ZombieCar";
description.innerHTML = "Now it's time to face the apocalypse and blow the crap out of thousands of zombies! The goal is to survive... but you wont. How long will you last? It's you, you're heavily armed passenger, against a boatload of super-strong, attacking zombies. Improve your skills. Forget the boring dialogs and the endless words. Is all about the bloody action and the fun to kill zombies in many crazy ways.";
break;
}
// if(namechildid==1)
// {
// description.innerHTML = "111111";
// }
// if(namechildid==2)
// {
// description.innerHTML = "111111";
// }
}
function onClickExit() {
$( "#showdetail" ).hide( "fade");
} | d2c065f2e693682aa6cfd98cddad0c3fb98f6380 | [
"JavaScript",
"C#"
] | 3 | JavaScript | Roman-Suprun/My_.NET_Portpholio | 7373aa7ff79e148b81d82a765f36b513e9ecec29 | 7c0b03575f1ba7b4b602fa2222f7cd3f2fed4b89 |
refs/heads/master | <repo_name>digitalbs/recipebook<file_sep>/src/app/Login/main/index.js
'use strict';
import mainTemplate from './index.html';
/**
* @module boilerplateApp.main
* @class LoginMainCtrl
* @constructor
*/
export default function LoginMainCtrl(AuthenticateUserResource, $state, $rootScope, jwtHelper) {
this.$rootScope = $rootScope;
this.AuthenticateUserResource = AuthenticateUserResource;
this.$state = $state;
this.jwtHelper = jwtHelper;
}
LoginMainCtrl.prototype.logUserIn = function () {
this.loginForm.username.$setValidity('incorrectCredentials', true);
this.loginForm.password.$setValidity('incorrectPassword', true);
this.AuthenticateUserResource.save(this.login).$promise.then(res => {
localStorage.setItem('id_token', res.id_token);
const tokenPayload = this.jwtHelper.decodeToken(res.id_token);
this.$rootScope.isAuthenticated = true;
this.$state.reload();
this.$state.go('app.recipes', {
username: tokenPayload.username
});
}).catch(err => {
console.log(err);
if (err.data.message === 'incorrectPassword') {
this.loginForm.password.$setValidity(err.data.message, false);
} else if (err.data.message === 'incorrectCredentials') {
this.loginForm.username.$setValidity(err.data.message, false);
}
});
};
export default {
templateUrl: mainTemplate,
bindings: {},
controllerAs: 'ctrl',
controller: LoginMainCtrl
};
LoginMainCtrl.$inject = ['AuthenticateUserResource', '$state', '$rootScope', 'jwtHelper'];
<file_sep>/src/app/config/app.modules.js
'use strict';
//Modules
import '../Login';
import '../Register';
import '../Admin';
import '../Header';
import '../Recipes';
import '../Common/resources';
export default [
'rn.login',
'rn.register',
'rn.admin',
'rn.header',
'rn.recipes',
'rn.common.resources'
];
<file_sep>/src/app/Register/main/index.test.js
import '../../Register';
describe('component: Register', () => {
let component;
let scope;
let $componentController;
beforeEach(angular.mock.module('rn.register'));
beforeEach(() => {
inject(($rootScope, _$componentController_) => {
scope = $rootScope.$new();
$componentController = _$componentController_;
component = $componentController('register', {
$scope: scope
});
});
});
it('should instantiate the Register component', function () {
expect(component).toBeDefined();
});
describe('submit', () => {
it('will call the register user resource', () => {
});
});
});
<file_sep>/src/app/routing/app.js
/**
* Route config for Home
*/
const ROUTES = {
'app': {
url: '',
views: {
'header': {
template: '<header></header>'
},
'perspective': {
template: '<md-content layout="column" layout-align="center" ui-view="content" class="md-padding"></md-content>'
}
}
}
};
export default ROUTES;
<file_sep>/src/app/Register/index.js
import register from './main';
angular.module('rn.register', [])
.component('register', register);
<file_sep>/src/app/Register/main/index.js
'use strict';
import RegisterTemplate from './index.html';
/**
* @module slush-angular-pack.Register
* @class RegisterCtrl
* @constructor
*/
export default function RegisterCtrl(UsersResource, $state, $rootScope) {
this.$rootScope = $rootScope;
this.UsersResource = UsersResource;
this.$state = $state;
}
RegisterCtrl.prototype.registerUser = function () {
this.UsersResource.save(this.register).$promise.then(res => {
localStorage.setItem('id_token', res.id_token);
this.$rootScope.isAuthenticated = true;
this.$state.go('app.recipes', {username: res.username});
}).catch(err => {
if (err.data.message === 'teamnameTaken') {
this.registerForm.teamname.$setValidity(err.data.message, false);
} else if (err.data.message === 'usernameTaken') {
this.registerForm.username.$setValidity(err.data.message, false);
}
});
};
export default {
templateUrl: RegisterTemplate,
bindings: {},
controllerAs: 'ctrl',
controller: RegisterCtrl
};
RegisterCtrl.$inject = ['UsersResource', '$state', '$rootScope'];
<file_sep>/integration/register.spec.js
'use strict';
describe('Register', () => {
it('should should navigate to the register page', () => {
browser.get('http://localhost:8000/#');
$('#link-register').click();
expect(browser.getCurrentUrl()).toBe('http://localhost:8000/#/register');
});
it('should register a random user', () => {
let userName = `user${Math.floor(Math.random() * 3000000000)}`;
$('#username').sendKeys(userName);
$('#password').sendKeys('<PASSWORD>');
$('button[type=submit]').click();
expect(browser.getCurrentUrl()).toBe(`http://localhost:8000/#/${userName}/recipes`);
});
});
<file_sep>/src/app/Recipes/index.js
import recipes from './main';
angular.module('rn.recipes', [])
.component('recipes', recipes);
<file_sep>/src/app/Common/resources/AuthenticateUserResource.js
'use strict';
/**
* @module kochen.common.resources
* @class AuthenticateUserResource
*/
/**
* Register User endpoint.
*
* @method AuthenticateUserResource
* @param {Object} $resource Angular $resource object. Link: http://docs.angularjs.org/api/ngResource/service/$resource
* @param {string} API_URL Url pointing to API layer
* @return {Object} Returns a resource object (GET, QUERY, SAVE, REMOVE, DELETE, Custom)
*
*/
export default function AuthenticateUserResource($resource, API_URL) {
return $resource(API_URL + '/users/authenticate');
}
AuthenticateUserResource.$inject = ['$resource', 'API_URL'];
<file_sep>/src/app/Login/main/index.html
<section layout="row" layout-fill layout-align="center">
<div flex="90" layout-padding layout="column">
<h2>{{ "login.headline" | translate }}</h2>
<form ng-submit="ctrl.logUserIn()" name="ctrl.loginForm">
<p>{{ "login.welcome" | translate }}</p>
<div layout="row">
<md-input-container flex>
<label>{{ "form.username" | translate }}</label>
<input ng-model="ctrl.login.username" name="username"/>
<div ng-messages="ctrl.loginForm.username.$error">
<div ng-message="required">{{ "form.required" | translate }}</div>
<div ng-message="incorrectCredentials">Username is Incorrect</div>
</div>
</md-input-container>
</div>
<div layout="row">
<md-input-container flex>
<label>{{ "form.password" | translate }}</label>
<input ng-model="ctrl.login.password" name="password" type="<PASSWORD>"/>
<div ng-messages="ctrl.loginForm.password.$error">
<div ng-message="incorrectPassword">Password is Incorrect</div>
</div>
</md-input-container>
</div>
<div layout="row">
<md-button class="md-primary md-raised" type="submit">{{"button.submit" | translate}}</md-button>
</div>
</form>
<p>{{ "login.notRegistered" | translate }} <a id="link-register" ui-sref="app.register">{{ "login.registerHere" | translate }}</a></p>
</div>
</section>
<file_sep>/src/app/config/app.config.js
'use strict';
module.exports = {
/* jshint ignore:start */
'debug': rn_debug,
'html5Routing': rn_html5Routing,
'urlPrefix': rn_urlPrefix,
'API_URL': rn_API_URL
};
<file_sep>/src/app/routing/login.js
/**
* Route config for Home
*/
const ROUTES = {
'app.login': {
url: '/login',
views: {
'content': {
template: '<login></login>'
}
}
}
};
export default ROUTES;
<file_sep>/src/app/Header/index.js
import header from './main';
angular.module('rn.header', [])
.component('header', header);
<file_sep>/src/app/routing/register.js
/**
* Route config for Home
*/
const ROUTES = {
'app.register': {
url: '/register',
views: {
'content': {
template: '<register></register>'
}
}
}
};
export default ROUTES;
<file_sep>/src/app/app.js
'use strict';
//Vendors
import vendors from './config/app.vendors';
//kochen Modules
import modules from './config/app.modules';
//Configuration
import config from './config/app.config';
//Routing
import routing from './routing';
//Language
import englishUS from '../i18n/lang/en-us';
// Vendors CSS
/* NOTE: it is recommended to load css via scss @import <location>, however,
if the css includes url paths to assets, but no url path override variable,
it should be loaded here so webpack picks it up.
*/
const modulesToLoad = modules.concat(vendors);
angular.module('rn', modulesToLoad)
.config(['$stateProvider', '$urlRouterProvider', '$httpProvider', '$logProvider', 'jwtOptionsProvider', '$locationProvider', function ($stateProvider, $urlRouterProvider, $httpProvider, $logProvider, jwtOptionsProvider, $locationProvider) {
$logProvider.debugEnabled(config.debug);
jwtOptionsProvider.config({
whiteListedDomains: ['localhost'],
unauthenticatedRedirectPath: '/login',
tokenGetter() {
return localStorage.getItem('id_token');
}
});
$httpProvider.interceptors.push('jwtInterceptor');
$httpProvider.defaults.useXDomain = true;
delete $httpProvider.defaults.headers.common['X-Requested-With'];
routing($stateProvider);
$urlRouterProvider.when('', '/');
$urlRouterProvider.when('/', ['$state', '$rootScope', ($state, $rootScope) => {
if ($rootScope.isAuthenticated) {
$state.go('app.recipes');
} else {
$state.go('app.login');
}
}]);
$urlRouterProvider.otherwise('/');
$locationProvider.html5Mode({
enabled: false,
requireBase: false
});
}])
.config(['$translateProvider', ($translateProvider) => {
$translateProvider.useMessageFormatInterpolation();
$translateProvider.useSanitizeValueStrategy('sanitize');
$translateProvider.translations('en-us', englishUS);
$translateProvider.preferredLanguage('en-us');
}])
.config(['$mdThemingProvider', ($mdThemingProvider) => {
$mdThemingProvider.theme('default')
.primaryPalette('indigo')
.accentPalette('grey');
}])
.run(['authManager', '$rootScope', '$state', '$location', (authManager, $rootScope, $state, $location) => {
if (localStorage.getItem('id_token')) {
$rootScope.isAuthenticated = true;
} else {
$rootScope.isAuthenticated = false;
}
authManager.checkAuthOnRefresh();
authManager.redirectWhenUnauthenticated();
$rootScope.logOut = function () {
localStorage.removeItem('id_token');
$state.reload();
};
$rootScope.$on('$locationChangeStart', (ev, next, current) => {
const publicPages = ['/login', '/register'];
const restrictedPage = publicPages.indexOf($location.path()) === -1;
if (restrictedPage && !localStorage.getItem('id_token')) {
$location.path('/login');
}
});
}])
.constant('API_URL', config.API_URL);
<file_sep>/src/app/routing/admin.js
/**
* Route config for Admin
*/
const ROUTES = {
'app.admin': {
url: '/admin',
views: {
'content': {
template: '<admin></admin>'
}
}
}
};
export default ROUTES;
| eec8bb7c935929df0c461b370f96478d341683f1 | [
"JavaScript",
"HTML"
] | 16 | JavaScript | digitalbs/recipebook | 0d60afb63f2613303bb5893c629d6d8d6f789a3c | 17eb3ee22daee26813bbb4e17d80a3ca3eca3bab |
refs/heads/master | <repo_name>yaroslavNikonorov/ArduinoUdoDriver<file_sep>/UdoDriver/UdoDriver.ino
// RTC (DS3231 over I2C) plus LCD / servo / EEPROM peripherals.
#include "Wire.h"
#define DS3231_I2C_ADDRESS 0x68 // fixed I2C slave address of the DS3231 RTC
#include <Servo.h>
#include <LiquidCrystal.h>
#include <EEPROM.h>
//initialize lcd (RS, E, D4..D7 pins of a 16x2 HD44780-style display)
LiquidCrystal lcd(12, 11, 7, 4, 3, 2);
//Display params: currently active screen-drawing function and a redraw ("dirty") flag
void (*printLCD)();
boolean displayUpdated=false;
//Button pins (analog pins used as digital inputs)
#define DEBOUNCE 50 // ms a button must stay HIGH before it counts as pressed
#define BTN_ONE A1
#define BTN_TWO A2
#define BTN_THREE A3
byte buttons[] = {BTN_ONE, BTN_TWO, BTN_THREE};
#define NUMBUTTONS sizeof(buttons)
boolean pressed[] = {false, false, false}; // debounced pressed state per button
long pressedTime[] = {0,0,0}; // millis() of the last LOW reading per button
//Light
#define LED_PIN 13
int ledState=LOW; // NOTE(review): appears unused here; the live state is lightState below
//constants level one: which top-level screen is active
#define CLOCK 0
#define MENU 1
//constants level two: which settings section is selected
#define CLOCK_SETTINGS 0
#define SERVO_SETTINGS 1
#define LIGHT_SETTINGS 2
//constants level three: interaction mode within a section
#define DONOTEDIT 0
#define EDIT 1
#define SELECTED 2
//sentinel meaning "no selection" at a menu level
#define NOTHING 255
//Menu tree: current UI state, one byte per level (indexed by LEVEL_* below)
byte menuTree[]={CLOCK,CLOCK_SETTINGS,NOTHING,NOTHING,NOTHING};
//levels: indices into menuTree
#define LEVEL_ONE 0
#define LEVEL_TWO 1
#define LEVEL_THREE 2
#define LEVEL_FOUR 3
#define LEVEL_FIVE 4
//Main menu labels, indexed by the level-two constants
String mainMenu[]={"Clock settings", "Servo settings", "Light settings"};
#define NUMMAINMENU 3
//Clock menu
long timeUpdated=millis();
#define TIME_UPDATE_INTERVAL 500
byte second, minute, hour, dayOfWeek, dayOfMonth, month, year;
//indices into clockEdit[] for each date/time field
#define SECOND 6
#define MINUTE 5
#define HOUR 4
#define DAYOFWEEK 0
#define DAYOFMONTH 1
#define MONTH 2
#define YEAR 3
byte clockEdit[]={0,0,0,0,0,0,0}; // working copy of the RTC fields while editing
#define CLOCK_EDIT sizeof(clockEdit)
#define BLINK_INTERVAL 500 // ms between redraws of the blinking edit field
long lastTimeBlinked=millis();
String dateString=""; // last date text drawn, used to skip redundant LCD writes
String timeString=""; // last time text drawn, used to skip redundant LCD writes
boolean clockEditBlink=true; // blink phase of the highlighted edit field
//Light menu labels, indexed by lightState (LOW=0 -> off, HIGH=1 -> on)
String lightMenu[]={"Turn off", "Turn on"};
#define NUMLIGHTMENU 2
byte lightState=EEPROM.read(0); // persisted light state, stored at EEPROM address 0
//Servo settings
#define SERVO_ONE 0
#define SERVO_TWO 1
#define SERVO_THREE 2
#define SERVO_FOUR 3
#define SERVO_ONE_PIN 5
#define SERVO_TWO_PIN 6
#define SERVO_THREE_PIN 9
#define SERVO_FOUR_PIN 10
#define SERVO_ENABLE 1
#define SERVO_DISABLE 0
//per-servo setting slots persisted in EEPROM (see getEEPROMAddress)
#define SERVO_ISENABLED 0
#define SERVO_DAY 1
#define SERVO_HOUR 2
#define SERVO_MINUTE 3
#define SERVO_ANGEL 4
#define SERVO_NUMBEROFTIMES 5
#define SERVO_RUNNOW 6 // menu action entry, not a stored setting
Servo servos[4];
String servoSettingsString[]={"Enabled", "Day", "Hour", "Minute", "Angel", "Times", "Run now"};
byte editServoSetting=0; // scratch value while a servo setting is being edited
#define SERVO_COUNT 4
#define SERVO_SETTINGS_COUNT 7
#define SERVO_TIMEOUT 1000
byte servoState[]={0,0,0,0};
byte runServo[]={0,0,0,0}; // pending "run now" repetitions per servo
long servoStateChanged[]={0,0,0,0};
void setup()
{
Wire.begin();
//setDS3231time(0, 47, 23, 3, 11, 8, 15);
pinMode(LED_PIN, OUTPUT);
digitalWrite(LED_PIN, lightState);
lcd.begin(16, 2);
//buttons
for(byte i=0;i<NUMBUTTONS;i++){
pinMode(buttons[i], INPUT);
}
printLCD=&showCurrentDateTime;
displayUpdated=true;
//Servo
servos[SERVO_ONE].attach(SERVO_ONE_PIN);
servos[SERVO_ONE].write(0);
servos[SERVO_TWO].attach(SERVO_TWO_PIN);
servos[SERVO_TWO].write(0);
servos[SERVO_THREE].attach(SERVO_THREE_PIN);
servos[SERVO_THREE].write(0);
servos[SERVO_FOUR].attach(SERVO_FOUR_PIN);
servos[SERVO_FOUR].write(0);
}
void buttonHandler(){
for(byte i=0; i<NUMBUTTONS;i++){
if(digitalRead(buttons[i]) == HIGH){
if(millis()-pressedTime[i]>DEBOUNCE){
pressed[i]=true;
}
}else{
pressed[i]=false;
pressedTime[i]=millis();
}
}
}
void menuHandler(){
for(byte i=0;i<NUMBUTTONS;i++){
if(pressed[i]){
switch(i){
case 0:
btnOneHandler();
break;
case 1:
btnTwoHandler();
break;
case 2:
btnThreeHandler();
break;
}
delay(200);
}
}
}
void btnOneHandler(){
  // Button one toggles between the clock screen and the settings menu, and
  // always abandons any deeper menu state (levels three to five).
  if (menuTree[LEVEL_ONE] == CLOCK) {
    menuTree[LEVEL_ONE] = MENU;
    printLCD = &showMenu;
  } else if (menuTree[LEVEL_ONE] == MENU) {
    menuTree[LEVEL_ONE] = CLOCK;
    printLCD = &showCurrentDateTime;
  }
  menuTree[LEVEL_THREE] = NOTHING;
  menuTree[LEVEL_FOUR] = NOTHING;
  menuTree[LEVEL_FIVE] = NOTHING;
  displayUpdated = true;
}
void btnTwoHandler(){
  // Button two only has meaning inside the menu: it cycles the top-level
  // entries while browsing, otherwise it is forwarded to the active
  // settings handler.
  if (menuTree[LEVEL_ONE] != MENU) {
    return;
  }
  if (menuTree[LEVEL_THREE] == NOTHING) {
    switchMenu();
  } else {
    runSettingHandler(BTN_TWO);
  }
}
void btnThreeHandler(){
  // Button three enters/advances the selected settings screen. On the very
  // first press the third menu level is initialised to the browse state
  // before the event is forwarded.
  if (menuTree[LEVEL_ONE] != MENU) {
    return;
  }
  if (menuTree[LEVEL_THREE] == NOTHING) {
    menuTree[LEVEL_THREE] = DONOTEDIT;
  }
  runSettingHandler(BTN_THREE);
}
void runSettingHandler(byte btn){
  // Forward a button event to the handler owning the currently selected
  // top-level menu entry.
  byte selected = menuTree[LEVEL_TWO];
  if (selected == CLOCK_SETTINGS) {
    clockSettingHandler(btn);
  } else if (selected == SERVO_SETTINGS) {
    servoSettingHandler(btn);
  } else if (selected == LIGHT_SETTINGS) {
    lightSettingHandler(btn);
  }
}
void clockSettingHandler(byte btn){
  // Clock settings UI state machine.
  // Browse mode (DONOTEDIT): BTN_THREE enters the screen / starts editing
  // the highlighted field, BTN_TWO moves the highlight to the next field.
  // Edit mode (EDIT): BTN_TWO increments the field, BTN_THREE saves it to
  // the RTC and returns to browse mode.
  if (menuTree[LEVEL_THREE] == DONOTEDIT && menuTree[LEVEL_FOUR] != NOTHING && btn == BTN_TWO) {
    // Advance the highlighted field. The field constants are laid out so the
    // visual order DAYOFWEEK..SECOND is exactly 0..6, so a modular increment
    // replaces the original seven-case switch.
    menuTree[LEVEL_FOUR] = (menuTree[LEVEL_FOUR] + 1) % CLOCK_EDIT;
  } else if (menuTree[LEVEL_THREE] == DONOTEDIT && menuTree[LEVEL_FOUR] == NOTHING && btn == BTN_THREE) {
    // First entry into the screen: snapshot the current RTC value into the
    // edit buffer (clockEdit indices: 0=dayOfWeek, 1=dayOfMonth, 2=month,
    // 3=year, 4=hour, 5=minute, 6=second) and highlight the first field.
    readDS3231time(&clockEdit[6], &clockEdit[5], &clockEdit[4], &clockEdit[0], &clockEdit[1], &clockEdit[2], &clockEdit[3]);
    menuTree[LEVEL_FOUR] = DAYOFWEEK;
    printLCD = &showClockSettings;
  } else if (menuTree[LEVEL_THREE] == EDIT && btn == BTN_TWO) {
    editClockSettings();
  } else if (menuTree[LEVEL_THREE] == EDIT && btn == BTN_THREE) {
    // Commit the edited field to the DS3231 and go back to browsing.
    saveClockSettings();
    menuTree[LEVEL_THREE] = DONOTEDIT;
    printLCD = &showClockSettings;
  } else if (menuTree[LEVEL_THREE] == DONOTEDIT && btn == BTN_THREE) {
    // Start editing the highlighted field.
    printLCD = &showEditClockSettings;
    displayUpdated = true;
    menuTree[LEVEL_THREE] = EDIT;
  } else {
    printLCD = &showClockSettings;
  }
}
void editClockSettings(){
  // Increment the clock field currently being edited, wrapping at that
  // field's natural limit. 1-based fields (day-of-week 1..7, day 1..31,
  // month 1..12, year 1..20) wrap back to 1; 0-based fields wrap to 0.
  // The original `x = ++x % N` double-write idiom is replaced with the
  // equivalent single-write form.
  byte type = menuTree[LEVEL_FOUR];
  switch (type) {
    case SECOND:
    case MINUTE:
      clockEdit[type] = (clockEdit[type] + 1) % 60;
      break;
    case HOUR:
      clockEdit[type] = (clockEdit[type] + 1) % 24;
      break;
    case DAYOFWEEK:
      clockEdit[type] = 1 + clockEdit[type] % 7;
      break;
    case DAYOFMONTH:
      clockEdit[type] = 1 + clockEdit[type] % 31;
      break;
    case MONTH:
      clockEdit[type] = 1 + clockEdit[type] % 12;
      break;
    case YEAR:
      clockEdit[type] = 1 + clockEdit[type] % 20;
      break;
  }
  displayUpdated = true;
}
void saveClockSettings(){
setDS3231time(clockEdit[menuTree[LEVEL_FOUR]], menuTree[LEVEL_FOUR]);
}
void showEditClockSettings(){
if(displayUpdated){
String showString;
if(menuTree[LEVEL_FOUR]==DAYOFWEEK){
showString=getDayOfWeek(clockEdit[menuTree[LEVEL_FOUR]]);
} else{
showString=String(clockEdit[menuTree[LEVEL_FOUR]]);
}
lcd.clear();
lcd.print(showString);
displayUpdated=false;
}
}
void showClockSettings(){
if(millis()-lastTimeBlinked>BLINK_INTERVAL){
String dateTime[7];
for(byte i=0;i<CLOCK_EDIT;i++){
if(i==DAYOFWEEK){
dateTime[i]=getDayOfWeek(clockEdit[i]);
} else {
dateTime[i]=String(clockEdit[i]);
}
}
if(clockEditBlink==true){
dateTime[menuTree[LEVEL_FOUR]]=getEmptyString(dateTime[menuTree[LEVEL_FOUR]]);
clockEditBlink=false;
}else{
clockEditBlink=true;
}
String date = dateTime[DAYOFWEEK]+" "+dateTime[DAYOFMONTH]+"/"+dateTime[MONTH]+"/"+dateTime[YEAR];
lcd.setCursor(0,0);
lcd.print(date);
String time = dateTime[HOUR]+":"+dateTime[MINUTE]+":"+dateTime[SECOND];
lcd.setCursor(0,1);
lcd.print(time);
lastTimeBlinked=millis();
}
}
String getEmptyString(String str){
  // Build a run of spaces exactly as wide as the given text (used to blank
  // a field while it blinks so the layout does not shift).
  String blanks = "";
  byte width = str.length();
  while (width-- > 0) {
    blanks += ' ';
  }
  return blanks;
}
//Servo
// Servo settings UI state machine. menuTree levels used here:
//   LEVEL_THREE: DONOTEDIT (choosing a servo) -> SELECTED (browsing its
//                settings) -> EDIT (changing one setting value)
//   LEVEL_FOUR:  which servo (SERVO_ONE..SERVO_FOUR); NOTHING before entry
//   LEVEL_FIVE:  which setting (SERVO_ISENABLED..SERVO_RUNNOW); NOTHING
//                until a servo has been selected
// BTN_TWO cycles through options; BTN_THREE confirms/descends.
void servoSettingHandler(byte btn){
  if(menuTree[LEVEL_THREE]==DONOTEDIT&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]==NOTHING&&btn==BTN_TWO){
    // Choosing a servo: cycle to the next one.
    menuTree[LEVEL_FOUR]=++menuTree[LEVEL_FOUR]%SERVO_COUNT;
    displayUpdated=true;
  }else if(menuTree[LEVEL_THREE]==DONOTEDIT&&menuTree[LEVEL_FOUR]==NOTHING&&menuTree[LEVEL_FIVE]==NOTHING&&btn==BTN_THREE){
    // First entry into the screen: start at servo one.
    menuTree[LEVEL_FOUR]=SERVO_ONE;
    printLCD=&chooseServoShow;
    displayUpdated=true;
  }else if(menuTree[LEVEL_THREE]==DONOTEDIT&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]==NOTHING&&btn==BTN_THREE){
    // Servo confirmed: descend into its settings list.
    menuTree[LEVEL_THREE]=SELECTED;
    menuTree[LEVEL_FIVE]=SERVO_ISENABLED;
    displayUpdated=true;
    printLCD=&servoSettingsShow;
  }else if(menuTree[LEVEL_THREE]==SELECTED&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]!=NOTHING&&btn==BTN_TWO){
    // Browsing settings: cycle to the next setting.
    menuTree[LEVEL_FIVE]=++menuTree[LEVEL_FIVE]%SERVO_SETTINGS_COUNT;
    displayUpdated=true;
  }else if(menuTree[LEVEL_THREE]==SELECTED&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]!=NOTHING&&btn==BTN_THREE){
    if(menuTree[LEVEL_FIVE]==SERVO_RUNNOW){
      // "Run now": queue the configured number of runs for this servo.
      runServo[menuTree[LEVEL_FOUR]]=getServoSetting(menuTree[LEVEL_FOUR], SERVO_NUMBEROFTIMES);
    }else{
      // Any other setting: load its stored value and enter edit mode.
      menuTree[LEVEL_THREE]=EDIT;
      editServoSetting=getServoSetting(menuTree[LEVEL_FOUR], menuTree[LEVEL_FIVE]);
      printLCD=&servoSettingsEditShow;
      displayUpdated=true;
    }
  }else if(menuTree[LEVEL_THREE]==EDIT&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]!=NOTHING&&btn==BTN_TWO){
    // Editing: bump the value (wraps within the setting's valid range).
    servoSettingsEdit();
    displayUpdated=true;
  }else if(menuTree[LEVEL_THREE]==EDIT&&menuTree[LEVEL_FOUR]!=NOTHING&&menuTree[LEVEL_FIVE]!=NOTHING&&btn==BTN_THREE){
    // Edit confirmed: persist to EEPROM and return to browsing.
    servoSettingsSave();
    menuTree[LEVEL_THREE]=SELECTED;
    displayUpdated=true;
    printLCD=&servoSettingsShow;
  }
}
void chooseServoShow(){
  // Show which servo (1-based for the user) is currently highlighted.
  if (!displayUpdated) {
    return;
  }
  lcd.clear();
  lcd.print("Servo " + String(menuTree[LEVEL_FOUR] + 1));
  displayUpdated = false;
}
void servoSettingsShow(){
if(displayUpdated){
String settings;
if(menuTree[LEVEL_FIVE]!=SERVO_RUNNOW){
settings=servoSettingsString[menuTree[LEVEL_FIVE]]+": "+String(getServoSetting(menuTree[LEVEL_FOUR], menuTree[LEVEL_FIVE]));
}else{
settings=servoSettingsString[menuTree[LEVEL_FIVE]];
}
lcd.clear();
lcd.print(settings);
displayUpdated=false;
}
}
void servoSettingsEditShow(){
  // Render the in-progress value of the servo setting being edited; redraw
  // only when the value has changed.
  if (!displayUpdated) {
    return;
  }
  lcd.clear();
  lcd.print(editServoSetting);
  displayUpdated = false;
}
void servoSettingsEdit(){
  // Step the edited value up by one and wrap it into the valid range for
  // the setting currently being edited.
  editServoSetting = validateServo(menuTree[LEVEL_FIVE], editServoSetting + 1);
}
void servoSettingsSave(){
setServoSetting(menuTree[LEVEL_FOUR], menuTree[LEVEL_FIVE], editServoSetting);
}
byte validateServo(byte type, byte value){
  // Wrap a candidate setting value into the legal range for its setting
  // type: enabled flag 0..1, day 0..31, hour 0..23, minute 0..59,
  // angle 0..90, repeat count 0..19. Unknown types pass through unchanged.
  if (type == SERVO_ISENABLED) {
    return value % 2;
  }
  if (type == SERVO_DAY) {
    return value % 32;
  }
  if (type == SERVO_HOUR) {
    return value % 24;
  }
  if (type == SERVO_MINUTE) {
    return value % 60;
  }
  if (type == SERVO_ANGEL) {
    return value % 91;
  }
  if (type == SERVO_NUMBEROFTIMES) {
    return value % 20;
  }
  return value;
}
//Light
void lightSettingHandler(byte btn){
  // Light settings state machine:
  //   browse + BTN_THREE -> enter edit mode and show the on/off screen
  //   edit   + BTN_TWO   -> toggle the pending on/off choice
  //   edit   + BTN_THREE -> apply the choice to the LED and persist it
  if (menuTree[LEVEL_THREE] == EDIT && btn == BTN_TWO) {
    lightState = (lightState == HIGH) ? LOW : HIGH;
  } else if (menuTree[LEVEL_THREE] == DONOTEDIT && btn == BTN_THREE) {
    menuTree[LEVEL_THREE] = EDIT;
    printLCD = &showLightSettings;
  } else if (menuTree[LEVEL_THREE] == EDIT && btn == BTN_THREE) {
    digitalWrite(LED_PIN, lightState);
    // EEPROM cells have limited write endurance; only rewrite the stored
    // state when it actually changed (same effect as EEPROM.update()).
    if (EEPROM.read(0) != lightState) {
      EEPROM.write(0, lightState);
    }
  }
  displayUpdated = true;
}
void showLightSettings(){
  // Show the pending light choice ("Turn off"/"Turn on") when dirty;
  // lightState (LOW=0/HIGH=1) doubles as the label index.
  if (!displayUpdated) {
    return;
  }
  lcd.clear();
  lcd.print(lightMenu[lightState]);
  displayUpdated = false;
}
void showCurrentDateTime(){
String currentDateString=getDateString();
String currentTimeString=getTimeString();
if(displayUpdated==true||currentDateString!=dateString){
dateString=currentDateString;
lcd.setCursor(0,0);
lcd.print(dateString);
}
if(displayUpdated==true||currentTimeString!=timeString){
timeString=currentTimeString;
lcd.setCursor(0,1);
lcd.print(timeString);
}
displayUpdated=false;
}
void showMenu(){
  // Render the highlighted top-level menu entry when dirty.
  if (!displayUpdated) {
    return;
  }
  lcd.clear();
  lcd.print(mainMenu[menuTree[LEVEL_TWO]]);
  displayUpdated = false;
}
void switchMenu(){
switch(menuTree[LEVEL_TWO]){
case CLOCK_SETTINGS:
menuTree[LEVEL_TWO]=SERVO_SETTINGS;
break;
case SERVO_SETTINGS:
menuTree[LEVEL_TWO]=LIGHT_SETTINGS;
break;
case LIGHT_SETTINGS:
menuTree[LEVEL_TWO]=CLOCK_SETTINGS;
break;
}
displayUpdated=true;
}
byte getEEPROMAddress(byte servoNum, byte settingsNum){
  // EEPROM layout: address 0 holds the light state; servo settings start at
  // address 10, with SERVO_SETTINGS_COUNT consecutive bytes per servo.
  byte base = 10;
  return base + servoNum * SERVO_SETTINGS_COUNT + settingsNum;
}
byte getServoSetting(byte servoNum, byte settingsNum){
  // Read one persisted setting byte for the given servo from EEPROM.
  byte address = getEEPROMAddress(servoNum, settingsNum);
  return EEPROM.read(address);
}
void setServoSetting(byte servoNum, byte settingsNum, byte setting){
  // Persist one setting byte for the given servo to its EEPROM slot.
  // Skip the write when the stored value already matches: EEPROM cells have
  // limited erase/write endurance (same effect as EEPROM.update()).
  byte address = getEEPROMAddress(servoNum, settingsNum);
  if (EEPROM.read(address) != setting) {
    EEPROM.write(address, setting);
  }
}
void setDS3231time(byte dateTime, byte type)
{
  // Write a single date/time field to the DS3231 RTC, BCD-encoded.
  // The UI field constants (DAYOFWEEK=0..SECOND=6) do not match the DS3231
  // register map (seconds=0x00 .. year=0x06), so translate through a table
  // indexed by the field constant:
  //   DAYOFWEEK->0x03, DAYOFMONTH->0x04, MONTH->0x05, YEAR->0x06,
  //   HOUR->0x02, MINUTE->0x01, SECOND->0x00
  static const byte fieldRegister[] = {3, 4, 5, 6, 2, 1, 0};
  if (type >= sizeof(fieldRegister)) {
    return; // unknown field: write nothing (the original wrote nothing either)
  }
  Wire.beginTransmission(DS3231_I2C_ADDRESS);
  Wire.write(fieldRegister[type]); // register pointer
  Wire.write(decToBcd(dateTime));  // value, BCD-encoded
  Wire.endTransmission();
}
// Burst-read all seven DS3231 date/time registers (00h-06h) and decode
// them from BCD into the caller-provided out parameters.
void readDS3231time(byte *second,
byte *minute,
byte *hour,
byte *dayOfWeek,
byte *dayOfMonth,
byte *month,
byte *year)
{
Wire.beginTransmission(DS3231_I2C_ADDRESS);
Wire.write(0); // set DS3231 register pointer to 00h
Wire.endTransmission();
Wire.requestFrom(DS3231_I2C_ADDRESS, 7);
// request seven bytes of data from DS3231 starting from register 00h
// 0x7f masks the seconds register's top bit; 0x3f drops the hour
// register's mode bits -- assumes the clock is configured in 24-hour
// mode (TODO confirm).
*second = bcdToDec(Wire.read() & 0x7f);
*minute = bcdToDec(Wire.read());
*hour = bcdToDec(Wire.read() & 0x3f);
*dayOfWeek = bcdToDec(Wire.read());
*dayOfMonth = bcdToDec(Wire.read());
*month = bcdToDec(Wire.read());
*year = bcdToDec(Wire.read());
}
// Zero-pad a 0..99 value to two digits (helper for getTimeString).
static String padTwo(byte value){
  if (value < 10) {
    return "0" + String(value);
  }
  return String(value);
}
// Build an "H:MM:SS" string from the current DS3231 time.
// NOTE(review): the hour is deliberately left unpadded to preserve the
// original display format; minutes and seconds are zero-padded.
String getTimeString()
{
byte second, minute, hour, dayOfWeek, dayOfMonth, month, year;
// retrieve data from DS3231
readDS3231time(&second, &minute, &hour, &dayOfWeek, &dayOfMonth, &month,
&year);
return String(hour) + ":" + padTwo(minute) + ":" + padTwo(second);
}
// Build a "<DayName> D/M/Y" string from the current DS3231 date.
// Day and month are not zero-padded; the year is the RTC's raw
// two-digit value (0-99).
String getDateString()
{
byte second, minute, hour, dayOfWeek, dayOfMonth, month, year;
// retrieve data from DS3231
readDS3231time(&second, &minute, &hour, &dayOfWeek, &dayOfMonth, &month,
&year);
return String(getDayOfWeek(dayOfWeek)) +" "+String(dayOfMonth)+"/"+String(month)+"/"+String(year);
}
// Translate a DS3231 day-of-week code (1=Sunday .. 7=Saturday) into its
// English name. Out-of-range codes yield an empty string, as before.
String getDayOfWeek(byte dayOfWeek){
  static const char* const dayNames[7] = {
    "Sunday", "Monday", "Tuesday", "Wednesday",
    "Thursday", "Friday", "Saturday"
  };
  if (dayOfWeek >= 1 && dayOfWeek <= 7) {
    return String(dayNames[dayOfWeek - 1]);
  }
  return String("");
}
// Convert normal decimal numbers (0-99) to binary coded decimal
byte decToBcd(byte val)
{
return( (val/10*16) + (val%10) );
}
// Convert binary coded decimal to normal decimal numbers.
// High nibble holds the tens digit, low nibble the units digit.
byte bcdToDec(byte val)
{
  return ((val >> 4) * 10) + (val & 0x0F);
}
// Scan all servos and queue pending runs against the current RTC time.
// A servo triggers when it is enabled, its scheduled hour:minute matches
// now, and it has not already fired today: SERVO_DAY stores the
// day-of-month of the last trigger, so each schedule fires at most once
// per day.
void checkServoTime(){
byte second, minute, hour, dayOfWeek, dayOfMonth, month, year;
// retrieve data from DS3231
readDS3231time(&second, &minute, &hour, &dayOfWeek, &dayOfMonth, &month,
&year);
for(byte i=0;i<SERVO_COUNT;i++){
byte servoDay=getServoSetting(i, SERVO_DAY);
byte servoHour=getServoSetting(i, SERVO_HOUR);
byte servoMinute=getServoSetting(i, SERVO_MINUTE);
byte servoEnabled=getServoSetting(i, SERVO_ISENABLED);
if(servoEnabled>0&&servoDay!=dayOfMonth&&servoHour==hour&&servoMinute==minute){
// queue the configured number of runs and mark today as done
runServo[i]=getServoSetting(i, SERVO_NUMBEROFTIMES);
setServoSetting(i, SERVO_DAY, dayOfMonth);
}
}
}
// Drive the servo state machine. Each loop pass:
//  1. refreshes the pending-run queue from the schedule, then
//  2. returns any servo that has been deflected longer than SERVO_TIMEOUT
//     back to 0 degrees, and
//  3. after a further SERVO_TIMEOUT of rest, starts the next queued run by
//     moving the servo to its configured angle ("angel" mirrors the
//     project's SERVO_ANGEL constant spelling).
void servoHandler(){
checkServoTime();
for(byte i=0;i<SERVO_COUNT;i++){
if(servoState[i]>0&&millis()-servoStateChanged[i]>SERVO_TIMEOUT){
servos[i].write(0);
servoState[i]=0;
servoStateChanged[i]=millis();
}
if(runServo[i]>0&&millis()-servoStateChanged[i]>SERVO_TIMEOUT){
byte angel=getServoSetting(i, SERVO_ANGEL);
servos[i].write(angel);
servoState[i]=angel;
servoStateChanged[i]=millis();
runServo[i]--;
}
}
}
// Arduino main loop: poll inputs, update menu state, refresh the LCD,
// and advance the servo scheduler -- all non-blocking.
void loop()
{
buttonHandler();
menuHandler();
printLCD();
servoHandler();
}
| 4243f109ec60d5eadd3f4977ea0f7340d0dd7255 | [
"C++"
] | 1 | C++ | yaroslavNikonorov/ArduinoUdoDriver | e7eac4547c497308421547e82d411a78cefbb0bd | 4f0a6bc84e44b78e16c13032857ed54867fcad4d |
refs/heads/master | <file_sep>var app = require('koa')();
// Minimal Koa v1 (generator-based) server for the resume site.
var Co = require('co');                // NOTE(review): required but never used in this file
var Router = require('koa-router')();
const send = require('koa-send');      // NOTE(review): required but never used in this file
const serve = require('koa-static');
const Jade = require('koa-jade');
// Platform-assigned port (e.g. Heroku) with a local fallback of 3000.
const Port = process.env.PORT || 3000;
const jade = new Jade({
viewPath: './views',
})
app.use(jade.middleware)
// Root route renders views/index via the koa-jade middleware.
Router.get('/', function *(){
this.render('index');
})
app.use(serve('public'));  // static assets served from ./public
app.use(Router.routes());
app.listen(Port);
console.log("listen to port " + Port);<file_sep>angular.module("myResume", [])
// Home controller: exposes the work-history entries rendered by the view.
// NOTE(review): the three entries are identical placeholders ("blabalbal")
// and "Tech Suport" is misspelled in the data -- presumably to be replaced
// with real resume content.
.controller("HomeCtrl", function ($scope) {
    $scope.works= [
        { "time": "Sep, 2009 to March, 2013", "company": "Ivenue.com Network Co.",
         "position": "Tech Suport/Sales Representative", "description": "blabalbal"},
        { "time": "Sep, 2009 to March, 2013", "company": "Ivenue.com Network Co.",
         "position": "Tech Suport/Sales Representative", "description": "blabalbal"},
        { "time": "Sep, 2009 to March, 2013", "company": "Ivenue.com Network Co.",
         "position": "Tech Suport/Sales Representative", "description": "blabalbal"},
    ];
    // Login action is currently a no-op stub.
    $scope.login = function () {
    };
});<file_sep>## <NAME> Resume Website | e2cbddecb425376d17a46016face3bb6988697e1 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | franklin-huang/resume | 53f41cb92c9ecf3f203d8a016832a05d554e3ddb | 8237a00d527d55609853fb29afeff982eb1d271d |
refs/heads/master | <file_sep>
package ec.espe.distri.common.dao;
import java.io.Serializable;
import java.util.List;
/**
 * Generic data-access contract implemented by all DAOs.
 *
 * @param <T>  entity type managed by the DAO
 * @param <ID> primary-key type (must be serializable)
 */
public interface GenericDAO<T, ID extends Serializable> {
/** Looks up an entity by primary key; {@code lock} requests a pessimistic lock. */
T findById(ID id, boolean lock);
/** Returns every persisted instance of T. */
List<T> findAll();
/** Query-by-example; the optional flag(s) appear to cap the result size -- confirm in the implementation. */
List<T> find(T entityExample, Boolean... maxRegistrosConsulta);
/** Counts the rows matching the example entity. */
Integer count(T entityExample);
/** Persists a new entity. */
void insert(T entity);
/** Merges changes and returns the managed instance. */
T update(T entity);
/** Detaches the entity from the persistence context. */
void makeTransient(T entity);
/** Deletes the entity. */
void remove(T entity);
/** Executes a native SQL statement. */
void ejecutarNativo(String sentencia);
/** Acquires a write lock on the entity and returns the locked instance. */
T bloquearEscritura(T entidad);
/** Reloads the entity's state from the database. */
void refresh(T entidad);
/** Flushes pending changes to the database. */
void flush();
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ec.espe.distri.web;
import ec.espe.distri.modelo.Movimiento;
import ec.espe.distri.servicios.MovimientoServicio;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;
import javax.faces.bean.ViewScoped;
/**
*
* @author RAUL
*/
/**
 * JSF view-scoped backing bean for the account-movements screen.
 *
 * The user can list either the first {@code cantidad} movements of the
 * currently selected account ("Movimientos" mode) or the movements whose
 * date lies inside the inclusive [fechaInicio, fechaFin] window
 * ("Fechas" mode).
 */
@ViewScoped
@ManagedBean
public class MovimientoBean implements Serializable {

    // Injected service; not referenced by the current view logic but kept
    // so the existing EJB wiring stays available.
    @EJB
    private MovimientoServicio movimientoServicio;

    /** Movements currently displayed in the table. */
    private List<Movimiento> movimientos;
    /** Maximum number of movements shown in "Movimientos" mode. */
    private Integer cantidad;
    /** Inclusive lower bound of the date filter. */
    private Date fechaInicio;
    /** Inclusive upper bound of the date filter. */
    private Date fechaFin;
    /** Active filter mode: "Movimientos" (count) or "Fechas" (date range). */
    private String seleccion;

    /** Session bean holding the logged-in client and the selected account. */
    @ManagedProperty(value = "#{loginBean}")
    private LoginBean datosLogin;

    public List<Movimiento> getMovimientos() {
        return movimientos;
    }

    public void setMovimientos(List<Movimiento> movimientos) {
        this.movimientos = movimientos;
    }

    public LoginBean getDatosLogin() {
        return datosLogin;
    }

    public void setDatosLogin(LoginBean datosLogin) {
        this.datosLogin = datosLogin;
    }

    public Integer getCantidad() {
        return cantidad;
    }

    public void setCantidad(Integer cantidad) {
        this.cantidad = cantidad;
    }

    public Date getFechaInicio() {
        return fechaInicio;
    }

    public void setFechaInicio(Date fechaInicio) {
        this.fechaInicio = fechaInicio;
    }

    public Date getFechaFin() {
        return fechaFin;
    }

    public void setFechaFin(Date fechaFin) {
        this.fechaFin = fechaFin;
    }

    public String getSeleccion() {
        return seleccion;
    }

    public void setSeleccion(String seleccion) {
        this.seleccion = seleccion;
    }

    /** Sets the defaults (first ten movements) when the view is created. */
    @PostConstruct
    public void inicializar()
    {
        this.cantidad = 10;
        this.fechaFin = new Date();
        this.fechaInicio = new Date();
        this.seleccion = "Movimientos";
        cargarNumeroMovimientos();
    }

    /** Reloads the table according to the filter mode chosen in the UI. */
    public void cargarMovimientos()
    {
        System.out.println(this.seleccion);
        if (this.seleccion.equals("Fechas")) {
            this.cargarFechasMovimientos();
        } else {
            this.cargarNumeroMovimientos();
        }
    }

    /**
     * Shows only the movements dated inside the inclusive
     * [fechaInicio, fechaFin] window. The result list is built directly,
     * replacing the original remove-while-indexing loop with a single O(n)
     * pass that yields the same list.
     */
    public void cargarFechasMovimientos()
    {
        this.movimientos = new ArrayList<>();
        for (Movimiento m : this.getDatosLogin().getCuentaSelected().getMovimientos()) {
            Date fecha = m.getFecha();
            boolean desdeInicio = fecha.after(fechaInicio) || fecha.equals(fechaInicio);
            boolean hastaFin = fecha.before(fechaFin) || fecha.equals(fechaFin);
            if (desdeInicio && hastaFin) {
                this.movimientos.add(m);
            }
        }
    }

    /** Shows at most {@code cantidad} movements, preserving list order. */
    public void cargarNumeroMovimientos()
    {
        this.movimientos = new ArrayList<>();
        for (Movimiento m : this.getDatosLogin().getCuentaSelected().getMovimientos()) {
            if (this.movimientos.size() >= this.cantidad) {
                break;
            }
            this.movimientos.add(m);
        }
    }
}
<file_sep>
package ec.espe.distri.dao;
import ec.espe.distri.common.dao.DefaultGenericDAOImple;
import ec.espe.distri.modelo.Movimiento;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
/**
 * Stateless DAO for {@link Movimiento} entities; all CRUD behavior is
 * inherited from the generic base implementation.
 */
@LocalBean
@Stateless
public class MovimientoDAO extends DefaultGenericDAOImple<Movimiento,Integer> {
public MovimientoDAO() {
super(Movimiento.class);
}
}
<file_sep>
package ec.espe.distri.exception;
/**
 * Unchecked exception used to signal business-validation failures;
 * the message carries the human-readable reason.
 */
public class ValidacionException extends RuntimeException {
public ValidacionException (String message)
{
super(message);
}
}
<file_sep>
package ec.espe.distri.serviciosRemotos;
import javax.ejb.Stateless;
/**
 * Demo implementation of the remote teller (cajero) service.
 * Every operation is a hard-coded stub: nothing is persisted and the
 * credentials/balance are fixed values.
 */
@Stateless
public class CajeroServicio implements CajeroServicioRemote
{
    /** A deposit "succeeds" whenever the amount is strictly positive. */
    @Override
    public boolean deposito(String identificador, String cuenta, float monto) {
        return monto > 0;
    }

    /** Accepts only the hard-coded demo credentials. */
    @Override
    public boolean logueo(String usuario, String clave)
    {
        System.out.println("logueo("+usuario+","+clave+")");
        return usuario.equals("carlos") && clave.equals("123");
    }

    /** Always reports the same fixed balance. */
    @Override
    public float getSaldo(String identificador, String cuenta)
    {
        return 100000;
    }

    /** Withdrawals always succeed in this stub. */
    @Override
    public boolean retiro(String identificador, String cuenta, float monto)
    {
        return true;
    }
}
<file_sep>
package ec.espe.distri.servicios;
import ec.espe.distri.dao.CuentaDAO;
import ec.espe.distri.modelo.Cuenta;
import java.util.List;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
/**
 * Application service for bank accounts: full listing and per-client
 * consolidated view.
 */
@LocalBean
@Stateless
public class CuentaServicio {
@EJB
private CuentaDAO cuentaDAO;
/** Returns every account in the system. */
public List<Cuenta> obtenerTodas()
{
return this.cuentaDAO.findAll();
}
/** Query-by-example: all accounts belonging to the given client. */
public List<Cuenta> consolidado(Integer codigoCliente)
{
Cuenta cuenta = new Cuenta();
cuenta.setCodigoCliente(codigoCliente);
// NOTE(review): GenericDAO declares find(...), not findO(...). findO must
// come from DefaultGenericDAOImple (not visible here) -- verify it is not
// a typo for find(cuenta).
return this.cuentaDAO.findO(cuenta);
}
}
<file_sep>package ec.espe.distri.persistencia;
import java.util.Date;
import org.bson.types.ObjectId;
import org.mongodb.morphia.annotations.Id;
import org.mongodb.morphia.annotations.Version;
/**
 * Common base for Morphia/MongoDB documents: generated ObjectId key,
 * last-change timestamp, and an optimistic-locking version counter.
 */
public abstract class BaseEntity {

    /** MongoDB document id. */
    @Id
    protected ObjectId id;

    /** Timestamp of the most recent modification. */
    protected Date lastChange;

    /** Optimistic-locking counter maintained by Morphia. */
    @Version
    private long version;

    public ObjectId getId() { return id; }

    public void setId(ObjectId id) { this.id = id; }

    public Date getLastChange() { return lastChange; }

    public void setLastChange(Date lastChange) { this.lastChange = lastChange; }

    @Override
    public String toString() {
        return String.format("BaseEntity [id=%s, lastChange=%s]", id, lastChange);
    }
}
<file_sep>
package ec.espe.distri.modelo;
import java.io.Serializable;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity for a bank customer, mapped to table CLIENTE.
 * Identity (equals/hashCode) is based solely on the generated primary
 * key {@code codigo}, exactly as before.
 */
@Entity
@Table(name = "CLIENTE")
public class Cliente implements Serializable
{

    /** Surrogate primary key, generated by the database. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "CODIGO_CLIENTE", nullable = false)
    private Integer codigo;

    /** First name. */
    @Column(name = "NOMBRE", nullable = false)
    private String nombre;

    /** Last name. */
    @Column(name = "APELLIDO", nullable = false)
    private String apellido;

    /** National identification number. */
    @Column(name = "IDENTIFICACION", nullable = false)
    private String identificacion;

    /** Contact e-mail address. */
    @Column(name = "CORREO_ELECTRONICO", nullable = false)
    private String correoElectronico;

    public Integer getCodigo() { return codigo; }

    public void setCodigo(Integer codigo) { this.codigo = codigo; }

    public String getNombre() { return nombre; }

    public void setNombre(String nombre) { this.nombre = nombre; }

    public String getApellido() { return apellido; }

    public void setApellido(String apellido) { this.apellido = apellido; }

    public String getIdentificacion() { return identificacion; }

    public void setIdentificacion(String identificacion) { this.identificacion = identificacion; }

    public String getCorreoElectronico() { return correoElectronico; }

    public void setCorreoElectronico(String correoElectronico) { this.correoElectronico = correoElectronico; }

    /** Same arithmetic as the conventional 7/37 seed-and-multiplier scheme. */
    @Override
    public int hashCode() {
        return 37 * 7 + Objects.hashCode(this.codigo);
    }

    /** Two clientes are equal iff same concrete class and same codigo. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return Objects.equals(this.codigo, ((Cliente) obj).codigo);
    }

    @Override
    public String toString() {
        return "Cliente{" + "codigo=" + codigo + ", nombre=" + nombre
                + ", apellido=" + apellido + ", identificacion=" + identificacion
                + ", correoElectronico=" + correoElectronico + '}';
    }
}
<file_sep>
package ec.espe.distri.servicios;
import javax.annotation.Resource;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
/**
 * JMS producer: pushes a text message onto the container-managed queue.
 * Connection and session are always closed in the finally block, even
 * when send fails.
 */
@LocalBean
@Stateless
public class Productor implements ProductorSLSBRemote{
@Resource(mappedName = "jms/ConnectionFactory")
private ConnectionFactory connectionFactory;
@Resource(mappedName = "jms/Queue")
private Queue queue;
/**
 * Sends {@code mensaje} as a TextMessage to the configured queue.
 *
 * @throws JMSException on any connection/session/send failure
 */
public void enviaMensajeJMS(String mensaje) throws JMSException {
Connection connection = null;
Session session = null;
try {
connection = connectionFactory.createConnection();
connection.start();
// transacted session; in an EJB the container drives the transaction
session = connection.createSession(true, 0);
TextMessage tm = session.createTextMessage(mensaje);
MessageProducer messageProducer = session.createProducer(queue);
messageProducer.send(tm);
} finally {
if (session != null) {
session.close();
}
if (connection != null) {
connection.close();
}
}
}
}
<file_sep>
package ec.espe.distri.dao;
import ec.espe.distri.common.dao.DefaultGenericDAOImple;
import ec.espe.distri.modelo.Cuenta;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
/**
 * Stateless DAO for {@link Cuenta} entities; all CRUD behavior is
 * inherited from the generic base implementation.
 */
@LocalBean
@Stateless
public class CuentaDAO extends DefaultGenericDAOImple<Cuenta,Integer> {
public CuentaDAO() {
super(Cuenta.class);
}
}
<file_sep>package ec.espe.distri.common.dao;
/**
 * Utility for building sort-order tokens used by the DAO layer:
 * {@code "A,<property>"} for ascending, {@code "D,<property>"} for
 * descending.
 */
public final class Order {

    /** Prefix marking an ascending sort. */
    public static final String ASC = "A,";
    /** Prefix marking a descending sort. */
    public static final String DESC = "D,";

    /** Non-instantiable utility class. */
    private Order() {
    }

    /** Returns the ascending-order token for the given property. */
    public static String ascendente(String propiedad) {
        return ASC + propiedad;
    }

    /** Returns the descending-order token for the given property. */
    public static String descendente(String propiedad) {
        return DESC + propiedad;
    }
}
<file_sep>
package ec.espe.distri.modelo;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
/**
 * JPA entity for a bank account (table CUENTA), owned by a Cliente and
 * holding its list of movements. Identity (equals/hashCode) is based
 * solely on the generated primary key.
 */
@Entity
@Table(name = "CUENTA")
public class Cuenta implements Serializable {
/** Surrogate primary key, generated by the database. */
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "CODIGO_CUENTA", nullable = false)
private Integer codigo;
/** Raw FK column; kept writable while the relationship below is read-only. */
@Column(name = "CODIGO_CLIENTE", nullable = false)
private Integer codigoCliente;
/** Owning client, mapped read-only over the same FK column. */
@JoinColumn(name = "CODIGO_CLIENTE", referencedColumnName = "CODIGO_CLIENTE", insertable = false, updatable = false)
@ManyToOne(optional = false)
private Cliente cliente;
/** All movements of this account; EAGER, so loaded with every Cuenta. */
@OneToMany(fetch = FetchType.EAGER, mappedBy = "cuenta")
private List<Movimiento> movimientos;
/** Account number (display string). */
@Column(name = "NUMERO", nullable = false)
private String numero;
/** Current balance. */
@Column(name = "SALDO", nullable = false)
private BigDecimal saldo;
/** Account type code. */
@Column(name = "TIPO", nullable = false)
private String tipo;
public Integer getCodigo() {
return codigo;
}
public void setCodigo(Integer codigo) {
this.codigo = codigo;
}
public Integer getCodigoCliente() {
return codigoCliente;
}
public void setCodigoCliente(Integer codigoCliente) {
this.codigoCliente = codigoCliente;
}
public Cliente getCliente() {
return cliente;
}
public void setCliente(Cliente cliente) {
this.cliente = cliente;
}
public List<Movimiento> getMovimientos() {
return movimientos;
}
public void setMovimientos(List<Movimiento> movimientos) {
this.movimientos = movimientos;
}
public String getNumero() {
return numero;
}
public void setNumero(String numero) {
this.numero = numero;
}
public BigDecimal getSaldo() {
return saldo;
}
public void setSaldo(BigDecimal saldo) {
this.saldo = saldo;
}
public String getTipo() {
return tipo;
}
public void setTipo(String tipo) {
this.tipo = tipo;
}
@Override
public int hashCode() {
int hash = 3;
hash = 13 * hash + Objects.hashCode(this.codigo);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Cuenta other = (Cuenta) obj;
if (!Objects.equals(this.codigo, other.codigo)) {
return false;
}
return true;
}
// NOTE(review): toString prints the full movimientos collection, which can
// be very large for active accounts -- consider logging only its size.
@Override
public String toString() {
return "Cuenta{" + "codigo=" + codigo + ", codigoCliente=" + codigoCliente + ", cliente=" + cliente + ", movimientos=" + movimientos + ", numero=" + numero + ", saldo=" + saldo + ", tipo=" + tipo + '}';
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ec.espe.distri.servicios;
import ec.espe.distri.dao.EmpleadoDAO;
import ec.espe.distri.modelo.Empleado;
import java.util.ArrayList;
import java.util.List;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import org.apache.commons.codec.digest.DigestUtils;
/**
*
* @author RAUL
*/
/**
 * Application service for Empleado (teller) accounts: lookup,
 * registration, and password authentication.
 */
@LocalBean
@Stateless
public class EmpleadoServicio {

    // Field renamed from the misspelled "empreadoDAO"; @EJB injection is
    // resolved by type, so the rename is safe for the container wiring.
    @EJB
    private EmpleadoDAO empleadoDAO;

    /** Returns the employee with the given primary key, or null if absent. */
    public Empleado buscarPorCodigo(Integer codigo)
    {
        return this.empleadoDAO.findById(codigo, false);
    }

    /**
     * Checks a plain-text password against the stored hash of the employee
     * identified by {@code codigoEmpleado}.
     *
     * SECURITY: stored credentials are unsalted MD5 digests, which is not an
     * acceptable password hash; migrating to bcrypt/PBKDF2 is recommended.
     * The comparison must stay MD5 until stored hashes are migrated.
     *
     * @return true only when the employee exists and the password matches
     */
    public boolean autentificacion(String contrasenia, Integer codigoEmpleado)
    {
        Empleado empleado = this.buscarPorCodigo(codigoEmpleado);
        return empleado != null
                && empleado.getContrasenia().equals(DigestUtils.md5Hex(contrasenia));
    }

    /** Persists a new employee record. */
    public void registrarEmpleado(Empleado empleado)
    {
        this.empleadoDAO.insert(empleado);
    }

    /** Seeds five demo teller accounts sharing one placeholder password. */
    public void crearUsuarios()
    {
        List<Empleado> empleados = new ArrayList<>();
        String pass = "<PASSWORD>";
        empleados.add(new Empleado(null,"<NAME>", "9876543210", "<EMAIL>", "cajero1", DigestUtils.md5Hex(pass)));
        empleados.add(new Empleado(null,"<NAME>", "9876543211", "<EMAIL>", "cajero2", DigestUtils.md5Hex(pass)));
        empleados.add(new Empleado(null,"<NAME>", "9876543212", "<EMAIL>", "cajero3", DigestUtils.md5Hex(pass)));
        empleados.add(new Empleado(null,"<NAME>", "9876543213", "<EMAIL>", "cajero4", DigestUtils.md5Hex(pass)));
        empleados.add(new Empleado(null,"<NAME>", "9876543214", "<EMAIL>", "cajero5", DigestUtils.md5Hex(pass)));
        for (Empleado e : empleados) {
            this.registrarEmpleado(e);
        }
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ec.espe.distri.web;
import ec.espe.distri.modelo.Cliente;
import ec.espe.distri.modelo.Cuenta;
import ec.espe.distri.modelo.Movimiento;
import ec.espe.distri.modelo.Persona;
import ec.espe.distri.modelo.Usuario;
import ec.espe.distri.servicios.ClienteServicio;
import ec.espe.distri.servicios.CuentaServicio;
import ec.espe.distri.servicios.EmpleadoServicio;
import ec.espe.distri.servicios.MovimientoServicio;
import ec.espe.distri.servicios.PersonaServicio;
import ec.espe.distri.servicios.UsuarioServicio;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;
import javax.faces.bean.ViewScoped;
import javax.faces.context.FacesContext;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.codec.digest.DigestUtils;
/**
*
* @author RAUL
*/
/**
 * JSF view-scoped backing bean for client lookup, consolidated account
 * position, and online-banking user creation.
 */
@ViewScoped
@ManagedBean
public class ClienteBean implements Serializable {
@EJB
private ClienteServicio clienteServicio;
private List<Cliente> clientes;
/** Client found by cedula lookup (null until buscarCliente succeeds). */
private Cliente cliente;
/** Cedula typed by the operator for the lookup. */
private String cedula;
/** Username chosen for the new online-banking user. */
private String usuario;
private String contrasenia;
private String correo;
@EJB
private CuentaServicio cuentaServicio;
/** Accounts of the logged-in client (consolidated position). */
private List<Cuenta> consolidado;
private Cuenta cuentaSelected;
// NOTE(review): not @EJB-injected -- instantiated manually in
// inicializar(); confirm UsuarioServicio needs no container resources.
private UsuarioServicio usuarioServicio;
@EJB
private MovimientoServicio movimientoServicio;
// NOTE(review): package-private, unlike the other private fields.
List<Movimiento> movimientos;
// Injected but only referenced by the commented-out experiments below.
@EJB
private EmpleadoServicio empleadoServicio;
@ManagedProperty(value = "#{loginBean}")
private LoginBean datosLogin;
public List<Cliente> getClientes() {
return clientes;
}
public void setClientes(List<Cliente> clientes) {
this.clientes = clientes;
}
public List<Cuenta> getConsolidado() {
return consolidado;
}
public void setConsolidado(List<Cuenta> consolidado) {
this.consolidado = consolidado;
}
public Cliente getCliente() {
return cliente;
}
public void setCliente(Cliente cliente) {
this.cliente = cliente;
}
public String getCedula() {
return cedula;
}
public void setCedula(String cedula) {
this.cedula = cedula;
}
public String getUsuario() {
return usuario;
}
public void setUsuario(String usuario) {
this.usuario = usuario;
}
public String getContrasenia() {
return contrasenia;
}
public void setContrasenia(String contrasenia) {
this.contrasenia = contrasenia;
}
public String getCorreo() {
return correo;
}
public void setCorreo(String correo) {
this.correo = correo;
}
public LoginBean getDatosLogin() {
return datosLogin;
}
public void setDatosLogin(LoginBean datosLogin) {
this.datosLogin = datosLogin;
}
public Cuenta getCuentaSelected() {
return cuentaSelected;
}
public void setCuentaSelected(Cuenta cuentaSelected) {
this.cuentaSelected = cuentaSelected;
}
public List<Movimiento> getMovimientos() {
return movimientos;
}
public void setMovimientos(List<Movimiento> movimientos) {
this.movimientos = movimientos;
}
/** Builds the service manually and loads the consolidated position. */
@PostConstruct
public void inicializar()
{
this.usuarioServicio = new UsuarioServicio();
this.posicionConsolidada();
/*//this.movimientoServicio.retiro(BigDecimal.valueOf(50.52d), 1,"<NAME> 1: <NAME>");
this.movimientoServicio.transferencia(BigDecimal.valueOf(50.52d), 1, 2);
this.clientes = this.clienteServicio.obtenerTodas();
//this.consolidado = this.cuentaServicio.consolidado(this.clientes.get(0).getCodigo());
this.consolidado = this.cuentaServicio.obtenerTodas();
//this.consolidado = this.cuentaServicio.obtenerTodas();
//empleadoServicio.crearUsuarios();*/
}
/** Loads the logged-in client's accounts; no-op when nobody is logged in. */
public void posicionConsolidada()
{
if(this.getDatosLogin().getCliente()!=null)
this.consolidado = this.cuentaServicio.consolidado(this.getDatosLogin().getCliente().getCodigo());
}
/** Loads the movements of the account currently selected in the session. */
public void cargarMovimientos()
{
this.movimientos = this.movimientoServicio.consolidado(this.getDatosLogin().getCuentaSelected().getCodigo());
}
/** Stores the account matching {@code codigo} as the session's selected account. */
public void setCuentaLogin(Integer codigo) throws IllegalAccessException, InvocationTargetException
{
for(Cuenta c: consolidado)
{
if(c.getCodigo().equals(codigo))
{
this.getDatosLogin().setCuentaSelected(c);
}
}
//this.getDatosLogin().setCuentaSelected(cuenta);
System.out.println(this.getDatosLogin().getCuentaSelected().getCodigo());
}
/** Looks the client up by the cedula entered in the form. */
public void buscarCliente()
{
this.cliente = this.clienteServicio.buscarPorCedula(cedula);
}
/** True once buscarCliente has found a client. */
public boolean existeCliente()
{
if(this.cliente!=null)
return true;
return false;
}
/**
 * Creates the online-banking user for the found client and reports the
 * outcome through FacesMessages.
 * NOTE(review): the local variable `usuario` shadows the String field of
 * the same name (the field is still read via this.usuario) -- confusing
 * but functionally correct. MD5 for passwords is weak; see EmpleadoServicio.
 */
public void crearCliente()
{
Usuario usuario = new Usuario(this.cliente.getCodigo(), 500.0d, "S",DigestUtils.md5Hex(contrasenia), this.usuario, this.correo);
if(this.usuarioServicio.crearUsuario(usuario))
{
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Info", "El usuario se creó correctamente"));
reset();
}
else
FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Error!", "El usuario ya existe"));
}
/** Clears the form after a successful creation. */
public void reset()
{
this.cliente = null;
this.usuario="";
this.contrasenia="";
this.correo="";
}
}
| 6c9b07ea11420de1767c59d4bab5dc5c42853357 | [
"Java"
] | 14 | Java | Chintan2569/eBanking-master | 5a2f4c65cf9a653fdbf9d1689fedd98d84b307e9 | 69d46224d696a264de286a15024310b8545e6957 |
refs/heads/master | <repo_name>thomasantonakis/gacd_cp<file_sep>/README.md
Getting and Cleaning Data Course Project CodeBook
=================================================
This file describes the variables, the data, and any transformations or work that I have performed to clean up the data.
* The site where the data was obtained:
http://archive.ics.uci.edu/ml/datasets/Human+Activity+Recognition+Using+Smartphones
The data for the project:
https://d396qusza40orc.cloudfront.net/getdata%2Fprojectfiles%2FUCI%20HAR%20Dataset.zip
* The run_analysis.R script performs the following steps to clean the data:
1. Read X_test.txt, y_test.txt and subject_test.txt from the "./UCI HAR Dataset/test" folder and store them in *test_561*, *test_feat* and *test_subj* variables respectively.
2. Read X_train.txt, y_train.txt and subject_train.txt from the "./UCI HAR Dataset/train" folder and store them in *train_561*, *train_feat* and *train_subj* variables respectively.
3. Read features.txt from "./UCI HAR Dataset" folder and store it to features variable.
4. Bind by columns (one next to the previous) the variables *train_subj*, *train_feat* and *train_561* to create the *train_whole* variable.
5. Bind by columns (one next to the previous) the variables *test_subj*, *test_feat* and *test_561* to create the *test_whole* variable.
6. Bind by rows (one under the previous) the variables *train_whole* and *test_whole* to create the *final1* variable. final 1 is a dataframe with 10299 rows and 563 columns. Proper names are given to the columns of this dataframe using also the data from the features variable.
7. In the second step, we try to subset the final1 data frame to only those variables (columns) that contain the "mean()" or the "std()" strings.
8. finalmean and finalstd dataframes do the above subsetting according to their name. That is finalmean contains only the columns from final1 that contain the string "mean()" in their column name, whereas finalstd contains only the columns from final1 that contain the string "std()" in their column name. Both these dataframes contain 10299 rows and 33 columns each.
9. Concatenate by columns (one next to the previous) the first two columns from the final1 dataframe, for reference reasons, then the finalmean dataframe and finally the finalstd dataframe. Save this giant dataframe under the name of final2 and now final2 dataframe contains 10299 rows and 68 columns.
10. Clean the column names of the subset. We remove the "()" and "-" symbols in the names, as well as make the first letter of "mean" and "std" a capital letter "M" and "S" respectively.
11. Read the activity_labels.txt file from the "./data" folder and store the data in a variable called *activity*.
12. Clean the activity names in the second column of *activity*. We first convert all names to lower case. If the name has an underscore between letters, we remove the underscore and capitalize the letter immediately after the underscore and the first letter.
13. We change the values of the activity (second) column of the final2 dataframe according to the name corresponding to each activity code using data from the activity dataframe.
14. Now the final2 dataframe is a tidy dataframe as proper and descriptive names have been assigned to both column names and activity codings.
15. Finally, generate a second independent tidy data set with the average of each measurement for each activity and each subject. We have 30 unique subjects and 6 unique activities, which result in a 180 combinations of the two. Then, for each combination, we calculate the mean of each measurement with the corresponding combination. So, after initializing the *tidyData* data frame and performing the two for-loops, we get a 180x68 data frame.
16. Write the *tidyData* out to "data_with_means.txt" file in current working directory.
<file_sep>/CodeBook.md
Code Book for tidy dataframe produced and submitted in this Course Project.
=====================================================================================
The features selected for this database come from the accelerometer and gyroscope 3-axial raw signals tAcc-XYZ and tGyro-XYZ. These time domain signals (prefix 't' to denote time) were captured at a constant rate of 50 Hz. Then they were filtered using a median filter and a 3rd order low pass Butterworth filter with a corner frequency of 20 Hz to remove noise. Similarly, the acceleration signal was then separated into body and gravity acceleration signals (tBodyAcc-XYZ and tGravityAcc-XYZ) using another low pass Butterworth filter with a corner frequency of 0.3 Hz.
Subsequently, the body linear acceleration and angular velocity were derived in time to obtain Jerk signals (tBodyAccJerk-XYZ and tBodyGyroJerk-XYZ). Also the magnitude of these three-dimensional signals were calculated using the Euclidean norm (tBodyAccMag, tGravityAccMag, tBodyAccJerkMag, tBodyGyroMag, tBodyGyroJerkMag).
Finally a Fast Fourier Transform (FFT) was applied to some of these signals producing fBodyAcc-XYZ, fBodyAccJerk-XYZ, fBodyGyro-XYZ, fBodyAccJerkMag, fBodyGyroMag, fBodyGyroJerkMag. (Note the 'f' to indicate frequency domain signals).
These signals were used to estimate variables of the feature vector for each pattern:
'-XYZ' is used to denote 3-axial signals in the X, Y and Z directions.
"subject" 1
Subject code
1 .Subject 1
2 .Subject 2
3 .Subject 3
4 .Subject 4
5 .Subject 5
6 .Subject 6
7 .Subject 7
8 .Subject 8
9 .Subject 9
10 .Subject 10
11 .Subject 11
12 .Subject 12
13 .Subject 13
14 .Subject 14
15 .Subject 15
16 .Subject 16
17 .Subject 17
18 .Subject 18
19 .Subject 19
20 .Subject 20
21 .Subject 21
22 .Subject 22
23 .Subject 23
24 .Subject 24
25 .Subject 25
26 .Subject 26
27 .Subject 27
28 .Subject 28
29 .Subject 29
30 .Subject 30
"activity" 2
Activity Name
Walking
WalkingUpstairs
WalkingDownstairs
Sitting
Standing
Laying
"tBodyAccMeanX"
Mean Value of Body Acceleration in axis X
-1..1
"tBodyAccMeanY"
Mean Value of Body Acceleration in axis Y
-1..1
"tBodyAccMeanZ"
Mean Value of Body Acceleration in axis Z
-1..1
"tGravityAccMeanX"
Mean Value of Gravity Acceleration in axis X
-1..1
"tGravityAccMeanY"
Mean Value of Gravity Acceleration in axis Y
-1..1
"tGravityAccMeanZ"
Mean Value of Gravity Acceleration in axis Z
-1..1
"tBodyAccJerkMeanX"
Mean Value of body linear acceleration in axis X
-1..1
"tBodyAccJerkMeanY"
Mean Value of body linear acceleration in axis Y
-1..1
"tBodyAccJerkMeanZ"
Mean Value of body linear acceleration in axis Z
-1..1
"tBodyGyroMeanX"
Mean Value of Gyroscope signal in axis X
-1..1
"tBodyGyroMeanY"
Mean Value of Gyroscope signal in axis Y
-1..1
"tBodyGyroMeanZ"
Mean Value of Gyroscope signal in axis Z
-1..1
"tBodyGyroJerkMeanX"
Mean Value of Angular Velocity of Gyroscope signal in axis X
-1..1
"tBodyGyroJerkMeanY"
Mean Value of Angular Velocity of Gyroscope signal in axis Y
-1..1
"tBodyGyroJerkMeanZ"
Mean Value of Angular Velocity of Gyroscope signal in axis Z
-1..1
"tBodyAccMagMean"
Mean Value of Magnitude of Body Acceleration
-1..1
"tGravityAccMagMean"
Mean Value of Magnitude of Gravity Acceleration
-1..1
"tBodyAccJerkMagMean"
Mean Value of Magnitude of body linear acceleration
-1..1
"tBodyGyroMagMean"
Mean Value of Magnitude of Gyroscope signal
-1..1
"tBodyGyroJerkMagMean"
Mean Value of Magnitude of Angular Velocity of Gyroscope signal
-1..1
"fBodyAccMeanX"
Mean Value of Body Acceleration in axis X (FFT)
-1..1
"fBodyAccMeanY"
Mean Value of Body Acceleration in axis Y (FFT)
-1..1
"fBodyAccMeanZ"
Mean Value of Body Acceleration in axis Z (FFT)
-1..1
"fBodyAccJerkMeanX"
Mean Value of body linear acceleration in axis X (FFT)
-1..1
"fBodyAccJerkMeanY"
Mean Value of body linear acceleration in axis Y (FFT)
-1..1
"fBodyAccJerkMeanZ"
Mean Value of body linear acceleration in axis Z (FFT)
-1..1
"fBodyGyroMeanX"
Mean Value of Gyroscope signal in axis X (FFT)
-1..1
"fBodyGyroMeanY"
Mean Value of Gyroscope signal in axis Y (FFT)
-1..1
"fBodyGyroMeanZ"
Mean Value of Gyroscope signal in axis Z (FFT)
-1..1
"fBodyAccMagMean"
Mean Value of Magnitude of Body Acceleration (FFT)
-1..1
"fBodyBodyAccJerkMagMean"
Mean Value of Magnitude of body linear acceleration (FFT)
-1..1
"fBodyBodyGyroMagMean"
Mean Value of Magnitude of Gyroscope signal (FFT)
-1..1
"fBodyBodyGyroJerkMagMean"
Mean Value of Magnitude of Angular Velocity of Gyroscope signal (FFT)
-1..1
"tBodyAccStdX"
Standard Deviation of Body Acceleration in axis X
-1..1
"tBodyAccStdY"
Standard Deviation of Body Acceleration in axis Y
-1..1
"tBodyAccStdZ"
Standard Deviation of Body Acceleration in axis Z
-1..1
"tGravityAccStdX"
Standard Deviation of Gravity Acceleration in axis X
-1..1
"tGravityAccStdY"
Standard Deviation of Gravity Acceleration in axis Y
-1..1
"tGravityAccStdZ"
Standard Deviation of Gravity Acceleration in axis Z
-1..1
"tBodyAccJerkStdX"
Standard Deviation of body linear acceleration in axis X
-1..1
"tBodyAccJerkStdY"
Standard Deviation of body linear acceleration in axis Y
-1..1
"tBodyAccJerkStdZ"
Standard Deviation of body linear acceleration in axis Z
-1..1
"tBodyGyroStdX"
Standard Deviation of Gyroscope signal in axis X
-1..1
"tBodyGyroStdY"
Standard Deviation of Gyroscope signal in axis Y
-1..1
"tBodyGyroStdZ"
Standard Deviation of Gyroscope signal in axis Z
-1..1
"tBodyGyroJerkStdX"
Standard Deviation of Angular Velocity of Gyroscope signal in axis X
-1..1
"tBodyGyroJerkStdY"
Standard Deviation of Angular Velocity of Gyroscope signal in axis Y
-1..1
"tBodyGyroJerkStdZ"
Standard Deviation of Angular Velocity of Gyroscope signal in axis Z
-1..1
"tBodyAccMagStd"
Standard Deviation of Magnitude of Body Acceleration
-1..1
"tGravityAccMagStd"
Standard Deviation of Magnitude of Gravity Acceleration
-1..1
"tBodyAccJerkMagStd"
Standard Deviation of Magnitude of body linear acceleration
-1..1
"tBodyGyroMagStd"
Standard Deviation of Magnitude of Gyroscope signal
-1..1
"tBodyGyroJerkMagStd"
Standard Deviation of Magnitude of Angular Velocity of Gyroscope signal
-1..1
"fBodyAccStdX"
Standard Deviation of Body Acceleration in axis X (FFT)
-1..1
"fBodyAccStdY"
Standard Deviation of Body Acceleration in axis Y (FFT)
-1..1
"fBodyAccStdZ"
Standard Deviation of Body Acceleration in axis Z (FFT)
-1..1
"fBodyAccJerkStdX"
Standard Deviation of body linear acceleration in axis X (FFT)
-1..1
"fBodyAccJerkStdY"
Standard Deviation of body linear acceleration in axis Y (FFT)
-1..1
"fBodyAccJerkStdZ"
Standard Deviation of body linear acceleration in axis Z (FFT)
-1..1
"fBodyGyroStdX"
Standard Deviation of Gyroscope signal in axis X (FFT)
-1..1
"fBodyGyroStdY"
Standard Deviation of Gyroscope signal in axis Y (FFT)
-1..1
"fBodyGyroStdZ"
Standard Deviation of Gyroscope signal in axis Z (FFT)
-1..1
"fBodyAccMagStd"
Standard Deviation of Magnitude of Body Acceleration (FFT)
-1..1
"fBodyBodyAccJerkMagStd"
Standard Deviation of Magnitude of body linear acceleration (FFT)
-1..1
"fBodyBodyGyroMagStd"
Standard Deviation of Magnitude of Gyroscope signal (FFT)
-1..1
"fBodyBodyGyroJerkMagStd"
Standard Deviation of Magnitude of Angular Velocity of Gyroscope signal (FFT)
-1..1
<file_sep>/run_analysis.R
# gacd_cp: Getting and Cleaning Data course project pipeline.
# Reads the raw UCI HAR dataset from "./UCI HAR Dataset/", merges the train
# and test partitions, keeps only the mean()/std() measurements, relabels the
# columns, maps numeric activity ids to descriptive names, and writes a tidy
# data set of per-subject, per-activity averages to tidyData.txt.
# NOTE(review): the subject/label files are read with sep=" " while the
# 561-feature files use sep="" (any whitespace) - confirm the raw files are
# single-space separated before relying on sep=" ".
gacd_cp<-function(){
# Step1. Merges the training and the test sets to create one data set. (final1)
test_subj<-read.table("./UCI HAR Dataset/test/subject_test.txt", sep=" ")
test_feat<-read.table("./UCI HAR Dataset/test/y_test.txt", sep=" ")
test_561<-read.table("./UCI HAR Dataset/test/X_test.txt", sep="")
train_subj<-read.table("./UCI HAR Dataset/train/subject_train.txt", sep=" ")
train_feat<-read.table("./UCI HAR Dataset/train/y_train.txt", sep=" ")
train_561<-read.table("./UCI HAR Dataset/train/X_train.txt", sep="")
features<-read.table("./UCI HAR Dataset/features.txt", sep=" ")
# Column-bind subject id, activity id and the 561 features for each partition,
# then stack train on top of test.
train_whole<-cbind(train_subj, train_feat, train_561)
test_whole<-cbind(test_subj, test_feat, test_561)
final1<-rbind(train_whole, test_whole)
# features$V2 holds the original 561 feature names from features.txt.
names(final1)<-c("subject", "activity", as.character(features$V2))
# Step2. Extracts only the measurements on the mean and standard
# deviation for each measurement.
# grep on the literal "mean()"/"std()" substrings of the column names.
finalmean<-final1[,grep("mean\\(\\)", colnames(final1))]
finalstd<-final1[,grep("std\\(\\)", colnames(final1))]
final2<-cbind(final1$subject, final1$activity, finalmean, finalstd)
# Step 4. Appropriately labels the data set with descriptive variable names.
names(final2)[1:2] <-c("subject", "activity")
names(final2) <- gsub("\\(\\)", "", names(final2)) # remove "()"
names(final2) <- gsub("mean", "Mean", names(final2)) # capitalize M
names(final2) <- gsub("std", "Std", names(final2)) # capitalize S
names(final2) <- gsub("-", "", names(final2)) # remove "-" in column names
# Step3. Uses descriptive activity names to name the activities
# in the data set
activity <- read.table("./UCI HAR Dataset/activity_labels.txt")
activity[, 2] <- tolower(gsub("_", "", activity[, 2]))
substr(activity[2, 2], 8, 8) <- toupper(substr(activity[2, 2], 8, 8)) # capitalize u of Upstairs
substr(activity[3, 2], 8, 8) <- toupper(substr(activity[3, 2], 8, 8)) # capitalize d of Downstairs
substr(activity[, 2], 1, 1) <- toupper(substr(activity[, 2], 1, 1)) # capitalize 1st letter
# Replace each numeric activity id with its descriptive label; assigning a
# character value into the copied vector coerces it to character.
actvlookup<-final2$activity
for (i in 1:length(actvlookup)) {
actvlookup[i]<-activity[final2$activity[i],2]
}
final2$activity<-actvlookup
#table(actvlookup)
# Step 5. Creates a second, independent tidy data set with the average of each
# variable for each activity and each subject.
# create a matrix with number of rows the product of the subjects and the activities
# and number of columns the columns of the final2 dataset
# Names the parameters
subjcount <- length(table(final2$subject))
acticount <- dim(activity)[1]
colscount <- dim(final2)[2]
tidyData <- matrix(NA, nrow=subjcount*acticount, ncol=colscount)
tidyData <- as.data.frame(tidyData)
colnames(tidyData) <- colnames(final2)
# One output row per (subject, activity) pair: column means of the matching
# subset of final2.
iteration<-1
for(i in 1:subjcount) {
for(j in 1:acticount) {
tidyData[iteration, 1] <- i
tidyData[iteration, 2] <- activity[j, 2]
selectsubj <- i == final2$subject
selectact <- activity[j, 2] == final2$activity
tidyData[iteration, 3:dim(final2)[2]] <- colMeans(final2[selectsubj&selectact, 3:dim(final2)[2]])
iteration<-iteration+1
}
}
# NOTE(review): the documented argument is `row.names`; `row.name=` works only
# via R's partial argument matching - consider spelling it out.
write.table(tidyData, file="./UCI HAR Dataset/tidyData.txt", row.name=FALSE)
}
"Markdown",
"R"
] | 3 | Markdown | thomasantonakis/gacd_cp | 178cbc25fcaf497a739683fcffe642858f31f427 | 35c0a18ab49619ebaaef207d1e87bc1377d564e3 |
refs/heads/master | <repo_name>seegno/eslint-config-seegno<file_sep>/CHANGELOG.md
# Changelog
## [v17.0.0](https://github.com/seegno/eslint-config-seegno/tree/v17.0.0) (2022-05-31)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v16.0.0...v17.0.0)
**Merged pull requests:**
- Update eslint-plugin-react-hooks dependency [\#125](https://github.com/seegno/eslint-config-seegno/pull/125) ([MicaelRodrigues](https://github.com/MicaelRodrigues))
- Bump ini from 1.3.5 to 1.3.8 [\#123](https://github.com/seegno/eslint-config-seegno/pull/123) ([dependabot[bot]](https://github.com/apps/dependabot))
- Add license file [\#122](https://github.com/seegno/eslint-config-seegno/pull/122) ([bsonntag](https://github.com/bsonntag))
- Bump path-parse from 1.0.6 to 1.0.7 [\#121](https://github.com/seegno/eslint-config-seegno/pull/121) ([dependabot[bot]](https://github.com/apps/dependabot))
- Bump glob-parent from 5.1.0 to 5.1.2 [\#120](https://github.com/seegno/eslint-config-seegno/pull/120) ([dependabot[bot]](https://github.com/apps/dependabot))
- Bump lodash from 4.17.20 to 4.17.21 [\#119](https://github.com/seegno/eslint-config-seegno/pull/119) ([dependabot[bot]](https://github.com/apps/dependabot))
## [v16.0.0](https://github.com/seegno/eslint-config-seegno/tree/v16.0.0) (2020-08-17)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v15.0.0...v16.0.0)
**Merged pull requests:**
- Bump lodash from 4.17.15 to 4.17.20 [\#112](https://github.com/seegno/eslint-config-seegno/pull/112) ([dependabot[bot]](https://github.com/apps/dependabot))
- Bump acorn from 7.1.0 to 7.1.1 [\#109](https://github.com/seegno/eslint-config-seegno/pull/109) ([dependabot[bot]](https://github.com/apps/dependabot))
- Update flowtype/delimiter-dangle rule [\#108](https://github.com/seegno/eslint-config-seegno/pull/108) ([bsonntag](https://github.com/bsonntag))
- Bump lodash from 4.17.11 to 4.17.15 [\#106](https://github.com/seegno/eslint-config-seegno/pull/106) ([dependabot[bot]](https://github.com/apps/dependabot))
- Bump mixin-deep from 1.3.1 to 1.3.2 [\#105](https://github.com/seegno/eslint-config-seegno/pull/105) ([dependabot[bot]](https://github.com/apps/dependabot))
- Bump eslint-utils from 1.3.1 to 1.4.3 [\#104](https://github.com/seegno/eslint-config-seegno/pull/104) ([dependabot[bot]](https://github.com/apps/dependabot))
- Add support for eslint v6 [\#103](https://github.com/seegno/eslint-config-seegno/pull/103) ([bsonntag](https://github.com/bsonntag))
- Fix react/jsx-curly-brace-presence rule [\#102](https://github.com/seegno/eslint-config-seegno/pull/102) ([bsonntag](https://github.com/bsonntag))
## [v15.0.0](https://github.com/seegno/eslint-config-seegno/tree/v15.0.0) (2019-04-16)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v14.0.0...v15.0.0)
**Merged pull requests:**
- Add react-hooks/exhaustive-deps rule [\#100](https://github.com/seegno/eslint-config-seegno/pull/100) ([bsonntag](https://github.com/bsonntag))
## [v14.0.0](https://github.com/seegno/eslint-config-seegno/tree/v14.0.0) (2019-04-08)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v13.0.0...v14.0.0)
**Merged pull requests:**
- Add pre-commit hook [\#101](https://github.com/seegno/eslint-config-seegno/pull/101) ([bsonntag](https://github.com/bsonntag))
- Update package dependencies [\#97](https://github.com/seegno/eslint-config-seegno/pull/97) ([johnmaia](https://github.com/johnmaia))
## [v13.0.0](https://github.com/seegno/eslint-config-seegno/tree/v13.0.0) (2019-03-27)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v12.0.0...v13.0.0)
**Merged pull requests:**
- Add react prefer-stateless-function rule [\#99](https://github.com/seegno/eslint-config-seegno/pull/99) ([rafaelcruzazevedo](https://github.com/rafaelcruzazevedo))
- Add react jsx-tag-spacing rule [\#98](https://github.com/seegno/eslint-config-seegno/pull/98) ([rafaelcruzazevedo](https://github.com/rafaelcruzazevedo))
- Add react hooks rules [\#96](https://github.com/seegno/eslint-config-seegno/pull/96) ([flaviocastro](https://github.com/flaviocastro))
- Fix ESLint errors [\#94](https://github.com/seegno/eslint-config-seegno/pull/94) ([flaviocastro](https://github.com/flaviocastro))
- Fix newline-between-switch-case fallthrough [\#93](https://github.com/seegno/eslint-config-seegno/pull/93) ([flaviocastro](https://github.com/flaviocastro))
## [v12.0.0](https://github.com/seegno/eslint-config-seegno/tree/v12.0.0) (2019-01-15)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v11.0.1...v12.0.0)
**Merged pull requests:**
- Add react class element order rules [\#92](https://github.com/seegno/eslint-config-seegno/pull/92) ([jmacedoit](https://github.com/jmacedoit))
## [v11.0.1](https://github.com/seegno/eslint-config-seegno/tree/v11.0.1) (2018-10-09)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v11.0.0...v11.0.1)
**Merged pull requests:**
- Fix sort destructure keys rule [\#91](https://github.com/seegno/eslint-config-seegno/pull/91) ([bsonntag](https://github.com/bsonntag))
## [v11.0.0](https://github.com/seegno/eslint-config-seegno/tree/v11.0.0) (2018-10-04)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v10.0.0...v11.0.0)
**Merged pull requests:**
- Add react and flow rules [\#90](https://github.com/seegno/eslint-config-seegno/pull/90) ([bsonntag](https://github.com/bsonntag))
- Update node supported versions [\#89](https://github.com/seegno/eslint-config-seegno/pull/89) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Update dependencies [\#87](https://github.com/seegno/eslint-config-seegno/pull/87) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Add sort-destructuring-keys rule [\#86](https://github.com/seegno/eslint-config-seegno/pull/86) ([johnmaia](https://github.com/johnmaia))
- Update eslint@4.8.0 [\#85](https://github.com/seegno/eslint-config-seegno/pull/85) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Add new-with-error plugin and rules [\#84](https://github.com/seegno/eslint-config-seegno/pull/84) ([abelsoares](https://github.com/abelsoares))
- Add switch-case plugin and rules [\#83](https://github.com/seegno/eslint-config-seegno/pull/83) ([abelsoares](https://github.com/abelsoares))
## [v10.0.0](https://github.com/seegno/eslint-config-seegno/tree/v10.0.0) (2017-09-29)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v9.0.0...v10.0.0)
**Merged pull requests:**
- Update babel-eslint@8.0.1 [\#82](https://github.com/seegno/eslint-config-seegno/pull/82) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Add node 8 to travis configuration [\#81](https://github.com/seegno/eslint-config-seegno/pull/81) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Add jest plugin and rules [\#80](https://github.com/seegno/eslint-config-seegno/pull/80) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Improve incorrect code test [\#79](https://github.com/seegno/eslint-config-seegno/pull/79) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Fix typo in environment.js [\#78](https://github.com/seegno/eslint-config-seegno/pull/78) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Update eslint to version v4.7.2 [\#77](https://github.com/seegno/eslint-config-seegno/pull/77) ([nunorafaelrocha](https://github.com/nunorafaelrocha))
- Enable `require-await` rule [\#75](https://github.com/seegno/eslint-config-seegno/pull/75) ([ricardogama](https://github.com/ricardogama))
- Add jasmine and jest environments [\#72](https://github.com/seegno/eslint-config-seegno/pull/72) ([ricardogama](https://github.com/ricardogama))
- Add `capitalized-comments` rule [\#67](https://github.com/seegno/eslint-config-seegno/pull/67) ([rplopes](https://github.com/rplopes))
## [v9.0.0](https://github.com/seegno/eslint-config-seegno/tree/v9.0.0) (2017-03-07)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v8.0.1...v9.0.0)
**Merged pull requests:**
- Replace sorting plugin with sort keys [\#70](https://github.com/seegno/eslint-config-seegno/pull/70) ([nunofgs](https://github.com/nunofgs))
- Add missing rules and improve tests [\#69](https://github.com/seegno/eslint-config-seegno/pull/69) ([nunofgs](https://github.com/nunofgs))
- Enforce the newline-before-return rule [\#68](https://github.com/seegno/eslint-config-seegno/pull/68) ([filipefigcorreia](https://github.com/filipefigcorreia))
## [v8.0.1](https://github.com/seegno/eslint-config-seegno/tree/v8.0.1) (2016-11-22)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v8.0.0...v8.0.1)
**Merged pull requests:**
- Remove eslint-plugin-babel [\#66](https://github.com/seegno/eslint-config-seegno/pull/66) ([kurayama](https://github.com/kurayama))
## [v8.0.0](https://github.com/seegno/eslint-config-seegno/tree/v8.0.0) (2016-10-12)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v7.0.0...v8.0.0)
**Merged pull requests:**
- Update sort-imports rule [\#64](https://github.com/seegno/eslint-config-seegno/pull/64) ([ricardogama](https://github.com/ricardogama))
- Update mocha@3.1.1 [\#63](https://github.com/seegno/eslint-config-seegno/pull/63) ([rplopes](https://github.com/rplopes))
- Add .npmignore for smaller builds [\#62](https://github.com/seegno/eslint-config-seegno/pull/62) ([ruimarinho](https://github.com/ruimarinho))
## [v7.0.0](https://github.com/seegno/eslint-config-seegno/tree/v7.0.0) (2016-10-03)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v6.0.0...v7.0.0)
**Merged pull requests:**
- Add sql template plugin and `sql-template/no-unsafe-query` rule [\#61](https://github.com/seegno/eslint-config-seegno/pull/61) ([rplopes](https://github.com/rplopes))
- Add `one-var-declaration-per-line` rule [\#60](https://github.com/seegno/eslint-config-seegno/pull/60) ([rplopes](https://github.com/rplopes))
- Add `brace-style` rule [\#59](https://github.com/seegno/eslint-config-seegno/pull/59) ([rplopes](https://github.com/rplopes))
- Fix computed property template string sorting issue [\#58](https://github.com/seegno/eslint-config-seegno/pull/58) ([ruiquelhas](https://github.com/ruiquelhas))
- Enable `newline-after-var` rule [\#56](https://github.com/seegno/eslint-config-seegno/pull/56) ([ruiquelhas](https://github.com/ruiquelhas))
## [v6.0.0](https://github.com/seegno/eslint-config-seegno/tree/v6.0.0) (2016-07-20)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v5.0.0...v6.0.0)
**Merged pull requests:**
- Update rules to disallow any dangling underscore [\#51](https://github.com/seegno/eslint-config-seegno/pull/51) ([ruiquelhas](https://github.com/ruiquelhas))
- Update `id-match` to validate declarations only [\#49](https://github.com/seegno/eslint-config-seegno/pull/49) ([ruiquelhas](https://github.com/ruiquelhas))
- Disable warning comment errors [\#47](https://github.com/seegno/eslint-config-seegno/pull/47) ([ruiquelhas](https://github.com/ruiquelhas))
## [v5.0.0](https://github.com/seegno/eslint-config-seegno/tree/v5.0.0) (2016-07-15)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v4.0.0...v5.0.0)
**Merged pull requests:**
- Enable JSCS compatible rules [\#45](https://github.com/seegno/eslint-config-seegno/pull/45) ([ruiquelhas](https://github.com/ruiquelhas))
- Add recommended configuration of sort-class-members plugin [\#44](https://github.com/seegno/eslint-config-seegno/pull/44) ([joaogranado](https://github.com/joaogranado))
- Update README.md to add missing dependency [\#39](https://github.com/seegno/eslint-config-seegno/pull/39) ([pgom](https://github.com/pgom))
- Add indentation validation [\#38](https://github.com/seegno/eslint-config-seegno/pull/38) ([promag](https://github.com/promag))
- Fix dependency peer mismatch [\#34](https://github.com/seegno/eslint-config-seegno/pull/34) ([ruiquelhas](https://github.com/ruiquelhas))
## [v4.0.0](https://github.com/seegno/eslint-config-seegno/tree/v4.0.0) (2016-04-05)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v3.1.0...v4.0.0)
**Merged pull requests:**
- Update release-related scripts [\#37](https://github.com/seegno/eslint-config-seegno/pull/37) ([ruiquelhas](https://github.com/ruiquelhas))
- Add rule to sort imports [\#36](https://github.com/seegno/eslint-config-seegno/pull/36) ([ruimarinho](https://github.com/ruimarinho))
- Add plugin to sort class members [\#31](https://github.com/seegno/eslint-config-seegno/pull/31) ([ruimarinho](https://github.com/ruimarinho))
## [v3.1.0](https://github.com/seegno/eslint-config-seegno/tree/v3.1.0) (2016-03-29)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v3.0.0...v3.1.0)
**Merged pull requests:**
- Disallow duplicate imports [\#35](https://github.com/seegno/eslint-config-seegno/pull/35) ([ruimarinho](https://github.com/ruimarinho))
- Use string for severity in config [\#30](https://github.com/seegno/eslint-config-seegno/pull/30) ([ruimarinho](https://github.com/ruimarinho))
- Disable complexity rule [\#29](https://github.com/seegno/eslint-config-seegno/pull/29) ([ruimarinho](https://github.com/ruimarinho))
- Disable max-nested-callbacks rule [\#28](https://github.com/seegno/eslint-config-seegno/pull/28) ([ruimarinho](https://github.com/ruimarinho))
- Require arrow parentheses as needed [\#26](https://github.com/seegno/eslint-config-seegno/pull/26) ([ruimarinho](https://github.com/ruimarinho))
## [v3.0.0](https://github.com/seegno/eslint-config-seegno/tree/v3.0.0) (2016-03-06)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v2.0.0...v3.0.0)
**Merged pull requests:**
- Update rules to support eslint 2.x.x [\#27](https://github.com/seegno/eslint-config-seegno/pull/27) ([ruiquelhas](https://github.com/ruiquelhas))
## [v2.0.0](https://github.com/seegno/eslint-config-seegno/tree/v2.0.0) (2016-02-03)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v1.2.1...v2.0.0)
**Merged pull requests:**
- Export config with standard format [\#19](https://github.com/seegno/eslint-config-seegno/pull/19) ([ruimarinho](https://github.com/ruimarinho))
## [v1.2.1](https://github.com/seegno/eslint-config-seegno/tree/v1.2.1) (2016-01-12)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v1.2.0...v1.2.1)
**Merged pull requests:**
- Remove duplicated rules [\#22](https://github.com/seegno/eslint-config-seegno/pull/22) ([joaogranado](https://github.com/joaogranado))
## [v1.2.0](https://github.com/seegno/eslint-config-seegno/tree/v1.2.0) (2015-11-30)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v1.1.1...v1.2.0)
**Merged pull requests:**
- Remove `no-ternary` rule [\#15](https://github.com/seegno/eslint-config-seegno/pull/15) ([ruiquelhas](https://github.com/ruiquelhas))
- Update `arrow-parens` rule [\#14](https://github.com/seegno/eslint-config-seegno/pull/14) ([pgom](https://github.com/pgom))
## [v1.1.1](https://github.com/seegno/eslint-config-seegno/tree/v1.1.1) (2015-11-11)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v1.1.0...v1.1.1)
**Merged pull requests:**
- Fix async generator-star issues [\#11](https://github.com/seegno/eslint-config-seegno/pull/11) ([ruiquelhas](https://github.com/ruiquelhas))
- Fix arrow function in variable declarations [\#10](https://github.com/seegno/eslint-config-seegno/pull/10) ([nunofgs](https://github.com/nunofgs))
- Fix async arrow functions [\#9](https://github.com/seegno/eslint-config-seegno/pull/9) ([nunofgs](https://github.com/nunofgs))
## [v1.1.0](https://github.com/seegno/eslint-config-seegno/tree/v1.1.0) (2015-11-05)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/1.1.0...v1.1.0)
## [1.1.0](https://github.com/seegno/eslint-config-seegno/tree/1.1.0) (2015-11-05)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/v1.0.0...1.1.0)
**Merged pull requests:**
- Add missing ES6 rules [\#5](https://github.com/seegno/eslint-config-seegno/pull/5) ([nunofgs](https://github.com/nunofgs))
## [v1.0.0](https://github.com/seegno/eslint-config-seegno/tree/v1.0.0) (2015-11-02)
[Full Changelog](https://github.com/seegno/eslint-config-seegno/compare/1.0.0...v1.0.0)
## [1.0.0](https://github.com/seegno/eslint-config-seegno/tree/1.0.0) (2015-11-02)
**Merged pull requests:**
- Add first version [\#1](https://github.com/seegno/eslint-config-seegno/pull/1) ([ruiquelhas](https://github.com/ruiquelhas))
<file_sep>/README.md
# eslint-config-seegno
Seegno-flavored ESLint config.
## Installation
```sh
$ npm install eslint eslint-config-seegno --save-dev
```
## Usage
Create an `.eslintrc.yml` file with the following:
```yaml
extends: seegno
```
Add the following `script` to your `package.json`:
```json
{
"scripts": {
"lint": "eslint ."
}
}
```
and run the linter with:
```sh
$ npm run lint
```
<file_sep>/test/fixtures/correct.js
// @flow
// Fixture of lint-correct code: every statement below exercises a preset rule.
// The linter must report zero violations for this file (see test/index.js).
// `jasmine`, `jest` and `mocha` envs.
/* eslint-disable jest/no-disabled-tests */
/* eslint-disable jest/no-focused-tests */
after();
afterAll();
afterEach();
before();
beforeAll();
beforeEach();
context();
describe();
expect();
fail();
fdescribe();
fit();
it();
jasmine();
jest();
mocha();
pending();
pit();
require();
run();
runs();
setup();
specify();
spyOn();
suite();
suiteSetup();
suiteTeardown();
teardown();
test();
waits();
waitsFor();
xcontext();
xdescribe();
xit();
xspecify();
xtest();
/* eslint-enable jest/no-disabled-tests */
/* eslint-enable jest/no-focused-tests */
// Avoid extra `no-unused-vars` violations.
function noop() {
// Do nothing
}
// Declare `React` and other react-related variables.
const React = null;
// `array-bracket-spacing`, `comma-spacing` and `no-multi-spaces`.
noop(['bar', 'foo']);
// `arrow-parens`
noop(() => 'bar');
noop(foo => foo);
noop((foo, bar) => [foo, bar]);
// `brace-style`.
try {
noop();
} catch (e) {
noop();
}
noop(function *() { return yield noop(); });
// `capitalized-comments`.
noop();
// First line must be capitalized.
// following lines don't.
// `comma-dangle`, `comma-style`.
noop({ bar: 'foo', foo: 'bar' });
// `consistent-this`.
const self = this;
noop(self);
// `curly`, `keyword-spacing`, `no-empty` and `space-before-blocks`.
let mixedRules = true;
if (mixedRules) {
mixedRules = false;
} else {
mixedRules = true;
}
// `dot-notation`.
const dotNotation = {};
dotNotation.foo = 'bar';
// `flowtype/delimiter-dangle`
type DelimiterDangleType = {
bar: string
};
interface DelimiterDangleInterface {
bar: string;
}
noop(DelimiterDangleType);
noop(DelimiterDangleInterface);
// `generator-star-spacing`
noop(function *() {});
noop(function *foo() {});
noop({ *foo() {} });
// `id-match`.
let idmatch;
let idMatch;
let IdMatch;
let IDMatch;
let IDMATCH;
let ID_MATCH;
let ID_M_ATCH;
noop(idmatch);
noop(idMatch);
noop(IdMatch);
noop(IDMatch);
noop(IDMATCH);
noop(ID_MATCH);
noop(ID_M_ATCH);
noop(__dirname);
noop(`${__dirname}`);
// `jest/no-disabled-tests` and `mocha/no-exclusive-tests`.
describe('noExclusiveTests', () => {
it('should work with `it`', () => {});
test('should work with `test`', () => {});
});
// `key-spacing`.
noop({ foo: 'bar' });
// `new-cap`.
const Cap = require('cap');
const newCap = new Cap();
noop(newCap);
// `newline-before-return`.
function funcThatReturns(bar) {
if (!bar) {
return;
}
return bar;
}
funcThatReturns('foo');
// `no-class-assign`.
class NoClassAssign { }
noop(NoClassAssign);
// `no-const-assign`.
let noConstAssign = true;
noConstAssign = false;
noop(noConstAssign);
// `no-constant-condition`.
const noConstantCondition = true;
if (noConstantCondition) {
noop(noConstantCondition);
}
// `no-dupe-class-members`.
class NoDupeClassMembers {
bar() {
return 'bar';
}
foo() {
return 'foo';
}
}
noop(NoDupeClassMembers);
// `no-labels`.
const noLabels = { label: true };
while (noLabels.label) {
break;
}
// `no-multi-str`.
const noMultiStr = `Line 1
Line 2`;
noop(noMultiStr);
// `no-multiple-empty-lines`.
const noMultipleEmptyLines = true;
noop(noMultipleEmptyLines);
// `no-spaced-func`.
noop();
// `no-this-before-super`.
const NoThisBeforeSuper = require('no-this-before-super');
class Child extends NoThisBeforeSuper {
constructor() {
super();
this.foo = 'bar';
}
}
noop(Child);
// `no-unused-vars`.
for (const usedVar of noop()) {
noop(usedVar);
}
// `no-warning-comments`.
// TODO: do something.
// FIXME: this is not a good idea.
// `new-with-error`.
try {
noop();
} catch (e) {
throw new Error();
}
// `object-curly-spacing`.
const objectCurlySpacing1 = { foo: 'bar' };
const objectCurlySpacing2 = {};
noop(objectCurlySpacing1);
noop(objectCurlySpacing2);
// `one-var`, `one-var-declaration-per-line`.
const oneVar1 = 'foo';
const oneVar2 = 'bar';
noop(oneVar1);
noop(oneVar2);
// `operator-linebreak`.
const operatorLineBreak = 1 + 2;
noop(operatorLineBreak);
// `padded-blocks`.
class PaddedBlocks {
constructor() {
switch (true) {
default: noop();
}
}
}
noop(new PaddedBlocks());
// `padding-line-between-statements`.
const paddingLineBetweenStatements = 'foo';
noop(paddingLineBetweenStatements);
// `quote-props`.
const quoteProps = {
0: 0,
foo: 0,
'foo-bar': 0,
null: 0,
true: 0
};
noop(quoteProps);
// `quotes`.
const quotes1 = 'foo';
const quotes2 = `foo`;
noop(quotes1);
noop(quotes2);
// `require-await`.
(async () => {
await noop();
})();
// `semi`.
noop();
// `semi-spacing`.
for (let semiSpacing = 0; semiSpacing < 10; ++semiSpacing) {
noop();
}
// `sort-imports`.
import 'import-1';
import * as Import6 from 'import-2';
import { Import5, import4 } from 'import-3';
import { import3 } from 'import-4';
import Import2 from 'import-5';
import import1 from 'import-6';
noop(Import2);
noop(Import5);
noop(Import6);
noop(import1);
noop(import3);
noop(import4);
// `sort-keys`.
const sortObjectProps = {
var1: 'foo',
var9: 'bar',
var10: 'biz'
};
noop(sortObjectProps);
// `space-before-function-paren`.
(function() {
noop();
})();
// `space-in-parens`.
noop('foo');
// `space-infix-ops`.
const spaceInfixOps = 1 + 2;
noop(spaceInfixOps);
// `space-unary-ops`.
let spaceUnaryOps1 = 1;
const spaceUnaryOps2 = ++spaceUnaryOps1;
noop(spaceUnaryOps2);
// `spaced-comment`.
// Spaced comment.
// `sql-template/no-unsafe-query`.
const db = {
query: noop()
};
const foo = 'foo';
const sql = 'sql-tag';
db.query(sql`SELECT ${foo} FROM bar`);
db.query(`SELECT foo FROM bar`);
// `switch-case/newline-between-switch-case`.
switch (true) {
case 'foobar':
case 'foobiz':
break;
default:
return;
}
// `template-curly-spacing`.
const templateCurlySpacing = 'foo';
noop(`${templateCurlySpacing}`);
// `wrap-iife`.
(function() {
noop();
})();
// `sort-destructure-keys/sort-destructure-keys`.
const props = { KeyA: 1, keyA: 2, keyB: 3, keyZ: 4 };
const { KeyA, keyA, keyB, keyZ, ...rest } = props;
noop(KeyA);
noop(keyA);
noop(keyB);
noop(keyZ);
noop(rest);
// `yoda`.
let yoda = true;
if (yoda === true) {
yoda = false;
}
// `react-hooks/exhaustive-deps`.
const ExhaustiveDeps = ({ foo }) => {
React.useEffect(() => foo());
React.useEffect(() => foo(), [foo]);
React.useMemo(() => foo(), [foo]);
React.useCallback(() => foo(), [foo]);
const [bar, setBar] = React.useState();
React.useEffect(() => setBar(!bar), [bar]);
return null;
};
noop(ExhaustiveDeps);
// `react-hooks/rules-of-hooks`.
const RulesOfHooks = () => {
React.useState();
return null;
};
function useHook() {
React.useState();
}
noop(RulesOfHooks, useHook);
// `react/jsx-curly-brace-presence`.
const CurlyBracePresence = () => (
<div foo={'bar'} />
);
noop(CurlyBracePresence);
// `react/jsx-no-literals`.
const NoLiterals = () => (
<div>
{'qux'}
</div>
);
noop(NoLiterals);
// `react/jsx-tag-spacing`.
const TagSpacing = () => (
<div />
);
noop(TagSpacing);
// `react/prefer-stateless-function`.
class PreferStatelessFunction extends React.Component {
getFoo = () => {
return 'foo';
}
render() {
return this.getFoo();
}
}
noop(PreferStatelessFunction);
<file_sep>/test/index.js
'use strict';
/**
* Module dependencies.
*/
const CLIEngine = require('eslint').CLIEngine;
const path = require('path');
/**
* Tests for `eslint-config-seegno`.
*/
describe('eslint-config-seegno', () => {
  // Lint engine wired to the preset's source entry point.
  const engine = new CLIEngine({ configFile: path.join(__dirname, '..', 'src', 'index.js') });

  it('should not generate any violation for correct code', () => {
    const fixture = path.join(__dirname, 'fixtures', 'correct.js');
    const report = engine.executeOnFiles([fixture]);

    expect(report.errorCount).toEqual(0);
  });

  it('should generate violations for environment-specific rules', () => {
    const fixture = path.join(__dirname, 'fixtures', 'environment.js');
    const [{ messages }] = engine.executeOnFiles([fixture]).results;
    const ruleIds = messages.map(({ ruleId }) => ruleId);

    expect(ruleIds).toEqual([
      'linebreak-style',
      'linebreak-style',
      'linebreak-style',
      'linebreak-style',
      'eol-last'
    ]);
  });

  it('should generate violations for incorrect code', () => {
    const fixture = path.join(__dirname, 'fixtures', 'incorrect.js');
    const [{ messages }] = engine.executeOnFiles([fixture]).results;
    const violations = {};

    // Tally the number of reported violations per rule id.
    for (const { ruleId } of messages) {
      violations[ruleId] = (violations[ruleId] || 0) + 1;
    }

    expect(violations).toEqual({
      'array-bracket-spacing': 1,
      'arrow-parens': 1,
      'brace-style': 1,
      'capitalized-comments': 1,
      'comma-dangle': 1,
      'comma-spacing': 1,
      'comma-style': 1,
      'consistent-this': 1,
      curly: 1,
      'dot-notation': 1,
      'flowtype/delimiter-dangle': 2,
      'generator-star-spacing': 5,
      'id-match': 1,
      indent: 1,
      'jest/no-disabled-tests': 9,
      'jest/no-focused-tests': 5,
      'jest/no-identical-title': 1,
      'key-spacing': 1,
      'keyword-spacing': 1,
      'mocha/no-exclusive-tests': 3,
      'new-cap': 1,
      'new-with-error/new-with-error': 1,
      'newline-before-return': 1,
      'no-class-assign': 1,
      'no-console': 1,
      'no-const-assign': 1,
      'no-constant-condition': 1,
      'no-dupe-class-members': 1,
      'no-empty': 1,
      'no-fallthrough': 1,
      'no-labels': 2,
      'no-multi-spaces': 1,
      'no-multi-str': 1,
      'no-multiple-empty-lines': 1,
      'no-new': 1,
      'no-spaced-func': 1,
      'no-this-before-super': 1,
      'no-undef': 1,
      'no-underscore-dangle': 1,
      'no-unused-vars': 2,
      'object-curly-spacing': 2,
      'one-var': 1,
      'one-var-declaration-per-line': 1,
      'operator-linebreak': 1,
      'padded-blocks': 6,
      'padding-line-between-statements': 1,
      'quote-props': 4,
      quotes: 1,
      'react/jsx-curly-brace-presence': 1,
      'react/jsx-no-literals': 1,
      'react/jsx-tag-spacing': 5,
      'react/prefer-stateless-function': 1,
      'react-hooks/exhaustive-deps': 5,
      'react-hooks/rules-of-hooks': 4,
      semi: 1,
      'semi-spacing': 2,
      'sort-destructure-keys/sort-destructure-keys': 1,
      'sort-imports-es6/sort-imports-es6': 1,
      'sort-keys': 1,
      'space-before-blocks': 1,
      'space-before-function-paren': 1,
      'space-in-parens': 2,
      'space-infix-ops': 1,
      'space-unary-ops': 1,
      'spaced-comment': 1,
      'sql-template/no-unsafe-query': 1,
      'switch-case/newline-between-switch-case': 2,
      'template-curly-spacing': 2,
      'wrap-iife': 1,
      yoda: 1
    });
  });
});
<file_sep>/src/index.js
/**
* Export `seegno` shared configuration preset.
*/
module.exports = {
env: {
es6: true,
jasmine: true,
jest: true,
mocha: true,
node: true
},
extends: ['eslint:recommended'],
parser: 'babel-eslint',
plugins: [
'flowtype',
'jest',
'mocha',
'new-with-error',
'react-hooks',
'react',
'sort-class-members',
'sort-destructure-keys',
'sort-imports-es6',
'sql-template',
'switch-case'
],
root: true,
rules: {
'accessor-pairs': 'error',
'array-bracket-spacing': 'error',
'arrow-parens': ['error', 'as-needed'],
'arrow-spacing': 'error',
'block-scoped-var': 'error',
'block-spacing': 'off',
'brace-style': ['error', '1tbs', {
allowSingleLine: true
}],
camelcase: 'off',
'capitalized-comments': ['error', 'always', {
ignoreConsecutiveComments: true
}],
'comma-dangle': 'error',
'comma-spacing': 'error',
'comma-style': 'error',
complexity: 'off',
'computed-property-spacing': 'error',
'consistent-return': 'off',
'consistent-this': ['error', 'self'],
curly: 'error',
'default-case': 'error',
'dot-location': ['error', 'property'],
'dot-notation': 'error',
'eol-last': 'error',
eqeqeq: ['error', 'smart'],
'flowtype/boolean-style': 'error',
'flowtype/define-flow-type': 'error',
'flowtype/delimiter-dangle': ['error', 'never', 'always'],
'flowtype/generic-spacing': 'error',
'flowtype/no-dupe-keys': 'error',
'flowtype/require-valid-file-annotation': ['error', 'always'],
'flowtype/semi': 'error',
'flowtype/sort-keys': 'error',
'flowtype/space-after-type-colon': 'error',
'flowtype/space-before-generic-bracket': 'error',
'flowtype/union-intersection-spacing': 'error',
'flowtype/use-flow-type': 'error',
'func-names': 'off',
'func-style': ['error', 'declaration', {
allowArrowFunctions: true
}],
'generator-star-spacing': ['error', 'before'],
'id-length': ['error', {
exceptions: ['_', 'e', 'i']
}],
'id-match': ['error', '^_$|^[a-zA-Z][a-zA-Z0-9]*$|^[A-Z][_A-Z0-9]+[A-Z0-9]$', {
onlyDeclarations: true,
properties: true
}],
indent: ['error', 2, {
SwitchCase: 1
}],
'jest/no-disabled-tests': 'error',
'jest/no-focused-tests': 'error',
'jest/no-identical-title': 'error',
'jsx-quotes': ['error', 'prefer-single'],
'key-spacing': 'error',
'keyword-spacing': 'error',
'linebreak-style': 'error',
'lines-around-comment': 'off',
'max-depth': 'error',
'max-nested-callbacks': 'off',
'max-params': ['error', 4],
'mocha/no-exclusive-tests': 'error',
'new-cap': 'error',
'new-parens': 'error',
'new-with-error/new-with-error': 'error',
'newline-before-return': 'error',
'no-alert': 'error',
'no-array-constructor': 'error',
'no-bitwise': 'error',
'no-caller': 'error',
'no-catch-shadow': 'off',
'no-cond-assign': ['error', 'always'],
'no-confusing-arrow': 'error',
'no-console': 'error',
'no-div-regex': 'error',
'no-duplicate-imports': 'error',
'no-else-return': 'error',
'no-empty': 'error',
'no-empty-label': 'off',
'no-eq-null': 'error',
'no-eval': 'error',
'no-extend-native': 'error',
'no-extra-bind': 'error',
'no-extra-parens': ['error', 'all', {
ignoreJSX: 'all'
}],
'no-floating-decimal': 'error',
'no-implied-eval': 'error',
'no-inline-comments': 'error',
'no-iterator': 'error',
'no-label-var': 'off',
'no-labels': 'error',
'no-lone-blocks': 'error',
'no-lonely-if': 'error',
'no-loop-func': 'error',
'no-mixed-requires': 'error',
'no-multi-spaces': 'error',
'no-multi-str': 'error',
'no-multiple-empty-lines': ['error', {
max: 1
}],
'no-native-reassign': 'error',
'no-nested-ternary': 'error',
'no-new': 'error',
'no-new-func': 'error',
'no-new-object': 'error',
'no-new-require': 'error',
'no-new-wrappers': 'error',
'no-octal-escape': 'error',
'no-path-concat': 'error',
'no-process-env': 'error',
'no-process-exit': 'error',
'no-proto': 'error',
'no-restricted-modules': 'error',
'no-return-assign': 'error',
'no-script-url': 'error',
'no-self-compare': 'error',
'no-sequences': 'error',
'no-shadow': 'off',
'no-shadow-restricted-names': 'error',
'no-spaced-func': 'error',
'no-sync': 'error',
'no-throw-literal': 'error',
'no-trailing-spaces': 'error',
'no-undef-init': 'error',
'no-undefined': 'off',
'no-underscore-dangle': 'error',
'no-unexpected-multiline': 'error',
'no-unneeded-ternary': 'error',
'no-unused-expressions': 'error',
'no-use-before-define': 'error',
'no-useless-call': 'error',
'no-useless-concat': 'error',
'no-var': 'error',
'no-void': 'error',
'no-warning-comments': 'off',
'no-with': 'error',
'object-curly-spacing': ['error', 'always'],
'object-shorthand': 'error',
'one-var': ['error', 'never'],
'one-var-declaration-per-line': ['error', 'always'],
'operator-assignment': 'error',
'operator-linebreak': ['error', 'none'],
'padded-blocks': ['error', { blocks: 'never', classes: 'always', switches: 'never' }],
'padding-line-between-statements': [
'error',
{ blankLine: 'always', next: '*', prev: ['const', 'let', 'var'] },
{ blankLine: 'any', next: ['const', 'let', 'var'], prev: ['const', 'let', 'var'] }
],
'prefer-arrow-callback': 'error',
'prefer-const': 'error',
'prefer-spread': 'error',
'prefer-template': 'error',
'quote-props': ['error', 'as-needed'],
quotes: ['error', 'single', {
allowTemplateLiterals: true
}],
radix: 'error',
'react/display-name': 'error',
'react/jsx-boolean-value': 'error',
'react/jsx-closing-bracket-location': 'error',
'react/jsx-curly-brace-presence': ['error', {
children: 'ignore',
props: 'always'
}],
'react/jsx-curly-spacing': 'error',
'react/jsx-indent': ['error', 2],
'react/jsx-indent-props': ['error', 2],
'react/jsx-key': 'error',
'react/jsx-max-props-per-line': 'error',
'react/jsx-no-duplicate-props': 'error',
'react/jsx-no-literals': 'error',
'react/jsx-no-undef': 'error',
'react/jsx-sort-props': 'error',
'react/jsx-tag-spacing': ['error', {
afterOpening: 'never',
beforeClosing: 'never',
beforeSelfClosing: 'always',
closingSlash: 'never'
}],
'react/jsx-uses-react': 'error',
'react/jsx-uses-vars': 'error',
'react/jsx-wrap-multilines': 'error',
'react/no-danger': 'error',
'react/no-direct-mutation-state': 'error',
'react/no-string-refs': 'error',
'react/no-unknown-property': 'error',
'react/prefer-es6-class': 'error',
'react/prefer-stateless-function': 'error',
'react/react-in-jsx-scope': 'error',
'react/self-closing-comp': 'error',
'react/sort-comp': ['error', {
groups: {
initialization: [
'displayName',
'propTypes',
'contextTypes',
'childContextTypes',
'mixins',
'statics',
'defaultProps',
'constructor',
'getDefaultProps',
'state',
'getInitialState',
'getChildContext'
],
lifecycle: [
'componentWillMount',
'UNSAFE_componentWillMount',
'componentDidMount',
'componentWillReceiveProps',
'UNSAFE_componentWillReceiveProps',
'shouldComponentUpdate',
'componentWillUpdate',
'UNSAFE_componentWillUpdate',
'getSnapshotBeforeUpdate',
'componentDidUpdate',
'componentDidCatch',
'componentWillUnmount'
]
},
order: [
'static-methods',
'initialization',
'everything-else',
'/^handle.+$/',
'lifecycle',
'/^render.+$/',
'render'
]
}],
'react/sort-prop-types': 'error',
'react-hooks/exhaustive-deps': 'error',
'react-hooks/rules-of-hooks': 'error',
'require-await': 'error',
'require-yield': 'error',
semi: 'error',
'semi-spacing': 'error',
'sort-destructure-keys/sort-destructure-keys': ['error', {
caseSensitive: true
}],
'sort-imports-es6/sort-imports-es6': ['error', {
ignoreCase: false,
ignoreMemberSort: false,
memberSyntaxSortOrder: ['none', 'all', 'multiple', 'single']
}],
'sort-keys': ['error', 'asc', {
natural: true
}],
'space-before-blocks': 'error',
'space-before-function-paren': ['error', { anonymous: 'never', named: 'never' }],
'space-in-parens': 'error',
'space-infix-ops': 'error',
'space-unary-ops': 'error',
'spaced-comment': 'error',
'sql-template/no-unsafe-query': 'error',
strict: 'off',
'switch-case/newline-between-switch-case': ['error', 'always', { fallthrough: 'never' }],
'template-curly-spacing': 'error',
'valid-jsdoc': 'error',
'vars-on-top': 'error',
'wrap-iife': ['error', 'inside'],
yoda: 'error'
},
settings: {
flowtype: {
onlyFilesWithFlowAnnotation: true
}
}
};
<file_sep>/test/fixtures/environment.js
// Incorrect environment-specific (os or editor) settings.
// `linebreak-style` - Windows line endings (CRLF).
// `eol-last` - no newline at the end of the file. | c5a85a04bf1df406365701796b2e8bafb1cf773b | [
"Markdown",
"JavaScript"
] | 6 | Markdown | seegno/eslint-config-seegno | 44959dcf753cdb4e6dd0255eb188c5b91f3fdc8e | 1630265d14605fc66f07aa24d039215580726570 |
refs/heads/master | <repo_name>slimjimsoftware/ace-diff3<file_sep>/src/visuals/getTheme.js
const C = require('../constants');
module.exports = function getTheme(acediff3, editor) {
let { theme } = acediff3.options;
if (editor === C.EDITOR_COMMON && acediff3.options.common.theme !== null) {
theme = acediff3.options.common.theme;
}
return theme;
};
<file_sep>/src/visuals/getMode.js
const C = require('../constants');
module.exports = function getMode(acediff3, editor) {
let { mode } = acediff3.options;
if (editor === C.EDITOR_COMMON && acediff3.options.common.mode !== null) {
mode = acediff3.options.common.mode;
}
return mode;
};
<file_sep>/src/index.js
/* eslint-disable max-len,no-nested-ternary */
/* eslint-disable no-console,no-use-before-define,camelcase,no-param-reassign,no-plusplus,block-scoped-var,no-redeclare,no-var,vars-on-top */
// Diffing library
const DiffMatchPatch = require('diff-match-patch');
const merge = require('./helpers/merge');
const throttle = require('./helpers/throttle');
const debounce = require('./helpers/debounce');
const normalizeContent = require('./helpers/normalizeContent');
const getCurve = require('./visuals/getCurve');
const getMode = require('./visuals/getMode');
const getTheme = require('./visuals/getTheme');
const getLine = require('./visuals/getLine');
const getEditorHeight = require('./visuals/getEditorHeight');
const createArrow = require('./visuals/createArrow');
const ensureElement = require('./dom/ensureElement');
const query = require('./dom/query');
const C = require('./constants');
// Range module placeholder
let Range;
/**
 * Locate Ace's Range constructor on the supplied ace namespace.
 *
 * Depending on how Ace was bundled, Range is either exposed directly
 * (`ace.Range`) or obtainable through its module loader (`ace.acequire`
 * in brace-style builds, `ace.require` in stock builds).
 *
 * @param {Object} ace - the Ace namespace object.
 * @returns {(Function|Object|false)} the Range module, or false if none found.
 */
function getRangeModule(ace) {
  if (ace.Range) {
    return ace.Range;
  }
  const loader = ace.acequire || ace.require;
  return loader ? loader('ace/range') : false;
}
// our constructor
/**
 * Three-pane diff viewer built on Ace. Creates left/common/right editors
 * inside `options.element`, wires scroll and change handlers, and renders
 * diff markers, SVG connectors and copy arrows in the two gutters.
 *
 * @param {Object} options - configuration; `element` is required, and `ace`
 *   must be supplied when `window.ace` is not available.
 * @returns {(AceDiff3|Error)} the instance, or an Error when setup fails.
 */
function AceDiff3(options = {}) {
  // Support being called without `new`.
  if (!(this instanceof AceDiff3)) {
    return new AceDiff3(options);
  }
  // Current instance we pass around to other functions
  const acediff3 = this;
  const getDefaultAce = () => (window ? window.ace : undefined);
  acediff3.options = merge({
    ace: getDefaultAce(),
    mode: null,
    theme: null,
    element: null,
    diffGranularity: C.DIFF_GRANULARITY_BROAD,
    lockScrolling: false, // not implemented yet
    showDiffs: true,
    showConnectors: true,
    maxDiffs: 5000,
    left: {
      id: null,
      content: null,
      mode: null,
      theme: null,
      editable: true,
      copyLinkEnabled: true,
    },
    common: {
      id: null,
      content: null,
      mode: null,
      theme: null,
      editable: true,
      copyLinkEnabled: false,
    },
    right: {
      id: null,
      content: null,
      mode: null,
      theme: null,
      editable: false,
      copyLinkEnabled: true,
    },
    classes: {
      gutter1: 'acediff3__gutter1',
      // BUG FIX: this previously defaulted to 'acediff3__gutter1' (copy-paste
      // error), so both gutters shared one name until the ensureElement()
      // calls below overwrote these values.
      gutter2: 'acediff3__gutter2',
      diff: 'acediff3__diffLine',
      connector: 'acediff3__connector',
      newCodeConnectorLink: 'acediff3__newCodeConnector',
      newCodeConnectorLinkContent: '⟶',
      deletedCodeConnectorLink: 'acediff3__deletedCodeConnector',
      deletedCodeConnectorLinkContent: '⟵',
      copyRightContainer: 'acediff3__copy--right',
      copyLeftContainer: 'acediff3__copy--left',
    },
    connectorYOffset: 0,
  }, options);
  const { ace } = acediff3.options;
  if (!ace) {
    const errMessage = 'No ace editor found nor supplied - `options.ace` or `window.ace` is missing';
    console.error(errMessage);
    return new Error(errMessage);
  }
  // Resolve Ace's Range constructor into the module-level binding used by
  // copy() and showDiff().
  Range = getRangeModule(ace);
  if (!Range) {
    const errMessage = 'Could not require Range module for Ace. Depends on your bundling strategy, but it usually comes with Ace itself. See https://ace.c9.io/api/range.html, open an issue on GitHub ace-diff/ace-diff';
    console.error(errMessage);
    return new Error(errMessage);
  }
  if (acediff3.options.element === null) {
    const errMessage = 'You need to specify an element for Ace-diff - `options.element` is missing';
    console.error(errMessage);
    return new Error(errMessage);
  }
  // `element` may be a DOM node or a CSS selector string.
  if (acediff3.options.element instanceof HTMLElement) {
    acediff3.el = acediff3.options.element;
  } else {
    acediff3.el = document.body.querySelector(acediff3.options.element);
  }
  if (!acediff3.el) {
    const errMessage = `Can't find the specified element ${acediff3.options.element}`;
    console.error(errMessage);
    return new Error(errMessage);
  }
  // Create (or reuse) the five panes in order: left editor, gutter 1,
  // common editor, gutter 2, right editor. ensureElement returns the id.
  acediff3.options.left.id = ensureElement(acediff3.el, 'acediff3__left');
  acediff3.options.classes.gutter1 = ensureElement(acediff3.el, 'acediff3__gutter1');
  acediff3.options.common.id = ensureElement(acediff3.el, 'acediff3__common');
  acediff3.options.classes.gutter2 = ensureElement(acediff3.el, 'acediff3__gutter2');
  acediff3.options.right.id = ensureElement(acediff3.el, 'acediff3__right');
  acediff3.el.innerHTML = `<div class="acediff3__wrap">${acediff3.el.innerHTML}</div>`;
  // instantiate the editors in an internal data structure
  // that will store a little info about the diffs and
  // editor content
  acediff3.editors = {
    left: {
      ace: ace.edit(acediff3.options.left.id),
      markers: [],
      lineLengths: [],
    },
    common: {
      ace: ace.edit(acediff3.options.common.id),
      markers: [],
      lineLengths: [],
    },
    right: {
      ace: ace.edit(acediff3.options.right.id),
      markers: [],
      lineLengths: [],
    },
    editorHeight: null,
  };
  // set up the editors
  acediff3.editors.left.ace.getSession().setMode(getMode(acediff3, C.EDITOR_LEFT));
  acediff3.editors.common.ace.getSession().setMode(getMode(acediff3, C.EDITOR_COMMON));
  acediff3.editors.right.ace.getSession().setMode(getMode(acediff3, C.EDITOR_RIGHT));
  acediff3.editors.left.ace.setReadOnly(!acediff3.options.left.editable);
  acediff3.editors.common.ace.setReadOnly(!acediff3.options.common.editable);
  acediff3.editors.right.ace.setReadOnly(!acediff3.options.right.editable);
  acediff3.editors.left.ace.setTheme(getTheme(acediff3, C.EDITOR_LEFT));
  acediff3.editors.common.ace.setTheme(getTheme(acediff3, C.EDITOR_COMMON));
  acediff3.editors.right.ace.setTheme(getTheme(acediff3, C.EDITOR_RIGHT));
  // The -1 cursor position keeps setValue from selecting all content.
  acediff3.editors.left.ace.setValue(normalizeContent(acediff3.options.left.content), -1);
  acediff3.editors.common.ace.setValue(normalizeContent(acediff3.options.common.content), -1);
  acediff3.editors.right.ace.setValue(normalizeContent(acediff3.options.right.content), -1);
  // store the visible height of the editors (assumed the same)
  acediff3.editors.editorHeight = getEditorHeight(acediff3);
  // The lineHeight is set to 0 initially and we need to wait for another tick
  // to get it, so the gutter/arrow setup and initial diff() are deferred.
  setTimeout(() => {
    // assumption: editors have same line heights
    acediff3.lineHeight = acediff3.editors.common.ace.renderer.lineHeight;
    addEventHandlers(acediff3);
    createCopyContainers(acediff3);
    createGutter(acediff3);
    acediff3.diff();
  }, 1);
}
// our public API
AceDiff3.prototype = {
  // allows on-the-fly changes to the AceDiff instance settings
  setOptions(options) {
    merge(this.options, options);
    this.diff();
  },
  // Total number of diffs across both comparisons (left↔common + common↔right).
  getNumDiffs() {
    return this.diffs1.length + this.diffs2.length;
  },
  // exposes the Ace editors in case the dev needs it
  getEditors() {
    return {
      left: this.editors.left.ace,
      common: this.editors.common.ace,
      right: this.editors.right.ace,
    };
  },
  // our main diffing function. I actually don't think this needs to exposed: it's called automatically,
  // but just to be safe, it's included
  diff() {
    const dmp = new DiffMatchPatch();
    const val1 = this.editors.left.ace.getSession().getValue();
    const val2 = this.editors.common.ace.getSession().getValue();
    const val3 = this.editors.right.ace.getSession().getValue();
    // diff1 compares common -> left; diff2 compares right -> common
    // (diff_main's first argument is the "old" text).
    const diff1 = dmp.diff_main(val2, val1);
    const diff2 = dmp.diff_main(val3, val2);
    dmp.diff_cleanupSemantic(diff1);
    dmp.diff_cleanupSemantic(diff2);
    // Cache per-line character counts; computeDiff maps char offsets to lines.
    this.editors.left.lineLengths = getLineLengths(this.editors.left);
    this.editors.common.lineLengths = getLineLengths(this.editors.common);
    this.editors.right.lineLengths = getLineLengths(this.editors.right);
    // parse the raw diff into something a little more palatable
    const diffs1 = [];
    const offset1 = {
      left: 0,
      right: 0,
    };
    const diffs2 = [];
    const offset2 = {
      left: 0,
      right: 0,
    };
    diff1.forEach((chunk, index, array) => {
      const chunkType = chunk[0];
      let text = chunk[1];
      // Fix for #28 https://github.com/ace-diff/ace-diff/issues/28
      // (shift a leading newline from the next chunk onto this one).
      if (array[index + 1] && text.endsWith('\n') && array[index + 1][1].startsWith('\n')) {
        text += '\n';
        diff1[index][1] = text;
        diff1[index + 1][1] = diff1[index + 1][1].replace(/^\n/, '');
      }
      // oddly, occasionally the algorithm returns a diff with no changes made
      if (text.length === 0) {
        return;
      }
      if (chunkType === C.DIFF_EQUAL) {
        offset1.left += text.length;
        offset1.right += text.length;
      } else if (chunkType === C.DIFF_DELETE) {
        diffs1.push(computeDiff(this, C.DIFF_DELETE, this.editors.left, this.editors.common, offset1.left, offset1.right, text));
        offset1.right += text.length;
      } else if (chunkType === C.DIFF_INSERT) {
        diffs1.push(computeDiff(this, C.DIFF_INSERT, this.editors.left, this.editors.common, offset1.left, offset1.right, text));
        offset1.left += text.length;
      }
    }, this);
    diff2.forEach((chunk, index, array) => {
      const chunkType = chunk[0];
      let text = chunk[1];
      // Fix for #28 https://github.com/ace-diff/ace-diff/issues/28
      // (shift a leading newline from the next chunk onto this one).
      if (array[index + 1] && text.endsWith('\n') && array[index + 1][1].startsWith('\n')) {
        text += '\n';
        diff2[index][1] = text;
        diff2[index + 1][1] = diff2[index + 1][1].replace(/^\n/, '');
      }
      // oddly, occasionally the algorithm returns a diff with no changes made
      if (text.length === 0) {
        return;
      }
      if (chunkType === C.DIFF_EQUAL) {
        offset2.left += text.length;
        offset2.right += text.length;
      } else if (chunkType === C.DIFF_DELETE) {
        diffs2.push(computeDiff(this, C.DIFF_DELETE, this.editors.common, this.editors.right, offset2.left, offset2.right, text));
        offset2.right += text.length;
      } else if (chunkType === C.DIFF_INSERT) {
        diffs2.push(computeDiff(this, C.DIFF_INSERT, this.editors.common, this.editors.right, offset2.left, offset2.right, text));
        offset2.left += text.length;
      }
    }, this);
    // simplify our computed diffs; this groups together multiple diffs on subsequent lines
    this.diffs1 = simplifyDiffs(this, diffs1);
    this.diffs2 = simplifyDiffs(this, diffs2);
    // if we're dealing with too many diffs, fail silently
    if (this.diffs1.length + this.diffs2.length > this.options.maxDiffs) {
      return;
    }
    clearDiffs(this);
    decorate(this);
  },
  // Tear down the three Ace instances, replacing each editor's container with
  // a plain clone holding the final text, and empty both gutters.
  destroy() {
    // destroy the editors
    const leftValue = this.editors.left.ace.getValue();
    this.editors.left.ace.destroy();
    let oldDiv = this.editors.left.ace.container;
    let newDiv = oldDiv.cloneNode(false);
    newDiv.textContent = leftValue;
    oldDiv.parentNode.replaceChild(newDiv, oldDiv);
    const commonValue = this.editors.common.ace.getValue();
    this.editors.common.ace.destroy();
    oldDiv = this.editors.common.ace.container;
    newDiv = oldDiv.cloneNode(false);
    newDiv.textContent = commonValue;
    oldDiv.parentNode.replaceChild(newDiv, oldDiv);
    const rightValue = this.editors.right.ace.getValue();
    this.editors.right.ace.destroy();
    oldDiv = this.editors.right.ace.container;
    newDiv = oldDiv.cloneNode(false);
    newDiv.textContent = rightValue;
    oldDiv.parentNode.replaceChild(newDiv, oldDiv);
    document.getElementById(this.options.classes.gutter1).innerHTML = '';
    document.getElementById(this.options.classes.gutter2).innerHTML = '';
    removeEventHandlers();
  },
};
let removeEventHandlers = () => { };
// Wire up all runtime event handlers: scroll syncing of the gutter visuals,
// re-diffing on content change, the gutter copy-arrow clicks, and a window
// resize handler (whose teardown is exposed via removeEventHandlers).
function addEventHandlers(acediff3) {
  // Redraw connectors/markers on scroll, throttled to roughly one frame (16ms).
  acediff3.editors.left.ace.getSession().on('changeScrollTop', throttle(() => { updateGap(acediff3); }, 16));
  acediff3.editors.common.ace.getSession().on('changeScrollTop', throttle(() => { updateGap(acediff3); }, 16));
  acediff3.editors.right.ace.getSession().on('changeScrollTop', throttle(() => { updateGap(acediff3); }, 16));
  const diff = acediff3.diff.bind(acediff3);
  // Any edit in any pane recomputes the diffs.
  acediff3.editors.left.ace.on('change', diff);
  acediff3.editors.common.ace.on('change', diff);
  acediff3.editors.right.ace.on('change', diff);
  // Delegated click handlers for the copy arrows rendered in each gutter.
  if (acediff3.options.left.copyLinkEnabled) {
    query.on(`#${acediff3.options.classes.gutter1}`, 'click', `.${acediff3.options.classes.newCodeConnectorLink}`, (e) => {
      copy(acediff3, e, C.LTR);
    });
  }
  if (acediff3.options.right.copyLinkEnabled) {
    query.on(`#${acediff3.options.classes.gutter2}`, 'click', `.${acediff3.options.classes.deletedCodeConnectorLink}`, (e) => {
      copy(acediff3, e, C.RTL);
    });
  }
  // Re-measure the common pane and re-diff after the window settles.
  const onResize = debounce(() => {
    // eslint-disable-next-line no-param-reassign
    acediff3.editors.availableHeight = document.getElementById(acediff3.options.common.id).offsetHeight;
    // TODO this should re-init gutter
    acediff3.diff();
  }, 250);
  window.addEventListener('resize', onResize);
  // Replace the module-level no-op so destroy() can detach this listener.
  removeEventHandlers = () => {
    window.removeEventListener('resize', onResize);
  };
}
/**
 * Handle a click on a gutter copy arrow: replace the target lines in the
 * common editor with the corresponding lines from the outer (left or right)
 * editor, then re-diff.
 *
 * @param {Object} acediff3 - the AceDiff3 instance.
 * @param {Event} e - the click event; the arrow element carries the diff index.
 * @param {string} dir - C.LTR (left → common) or C.RTL (right → common).
 */
function copy(acediff3, e, dir) {
  // Which diff this arrow belongs to (stored on the element by createArrow).
  const diffIndex = parseInt(e.target.getAttribute('data-diff-index'), 10);
  const isLtr = dir === C.LTR;
  const diff = isLtr ? acediff3.diffs1[diffIndex] : acediff3.diffs2[diffIndex];
  // Both directions write INTO the common editor; only the source pane and
  // which side of the diff record we read from differ. In diffs1 "left" is
  // the left editor; in diffs2 "left" is the common editor.
  const sourceEditor = isLtr ? acediff3.editors.left : acediff3.editors.right;
  const targetEditor = acediff3.editors.common;
  const startLine = isLtr ? diff.leftStartLine : diff.rightStartLine;
  const endLine = isLtr ? diff.leftEndLine : diff.rightEndLine;
  const targetStartLine = isLtr ? diff.rightStartLine : diff.leftStartLine;
  const targetEndLine = isLtr ? diff.rightEndLine : diff.leftEndLine;
  // Gather the source lines verbatim, each newline-terminated.
  const sourceLines = [];
  for (let lineNum = startLine; lineNum < endLine; lineNum += 1) {
    sourceLines.push(`${getLine(sourceEditor, lineNum)}\n`);
  }
  const contentToInsert = sourceLines.join('');
  // Preserve the target editor's scroll position across the replacement.
  const h = targetEditor.ace.getSession().getScrollTop();
  targetEditor.ace.getSession().replace(new Range(targetStartLine, 0, targetEndLine, 0), contentToInsert);
  targetEditor.ace.getSession().setScrollTop(parseInt(h, 10));
  acediff3.diff();
}
/**
 * Return the character count of each line in the editor, counting the
 * trailing newline (hence the +1 per line).
 *
 * @param {Object} editor - an internal editor record holding an Ace instance.
 * @returns {number[]} one length per document line.
 */
function getLineLengths(editor) {
  const allLines = editor.ace.getSession().doc.getAllLines();
  return allLines.map((line) => line.length + 1); // +1 for the newline char
}
// shows a diff in one of the two editors.
/**
 * Highlight a diff region in one editor by adding a full-line Ace marker.
 *
 * `endLine` is exclusive (one past the last highlighted row). When
 * endLine === startLine the marker renders as a 1px "targetOnly" line
 * rather than highlighted rows.
 *
 * @param {Object} acediff3 - the AceDiff3 instance.
 * @param {string} editor - key into acediff3.editors ('left'|'common'|'right').
 * @param {number} startLine - first row of the region.
 * @param {number} endLine - one past the last row.
 * @param {string} className - base CSS class for the marker.
 */
function showDiff(acediff3, editor, startLine, endLine, className) {
  const targetEditor = acediff3.editors[editor];
  // Guard against an inverted range (shouldn't occur, but be safe).
  const safeEndLine = endLine < startLine ? startLine : endLine;
  const classNames = `${className} ${(safeEndLine > startLine) ? 'lines' : 'targetOnly'}`;
  // Start/end chars of 0 and 1 combined with 'fullLine' make Ace highlight
  // whole rows; -1 because safeEndLine is exclusive.
  const marker = targetEditor.ace.session.addMarker(
    new Range(startLine, 0, safeEndLine - 1, 1),
    classNames,
    'fullLine',
  );
  targetEditor.markers.push(marker);
}
// called onscroll. Updates the gap to ensure the connectors are all lining up
// Scroll handler: redraw the diff markers and SVG connectors so they stay
// aligned with the editors, then re-pin the copy-arrow containers.
function updateGap(acediff3) {
  clearDiffs(acediff3);
  decorate(acediff3);
  // reposition the copy containers containing all the arrows
  positionCopyContainers(acediff3);
}
// Remove every highlight marker previously added to the three editors
// (left, then common, then right).
function clearDiffs(acediff3) {
  ['left', 'common', 'right'].forEach((pane) => {
    const editor = acediff3.editors[pane];
    editor.markers.forEach((marker) => {
      editor.ace.getSession().removeMarker(marker);
    });
  });
}
/**
 * Draw one SVG connector polygon in the gutter between two editors.
 *
 * Every connector, regardless of direction, uses the same four-corner
 * layout (corners may coincide for single-line diffs):
 *   topLeft     topRight
 *   bottomLeft  bottomRight
 */
function addConnector(acediff3, leftEditor, rightEditor, leftStartLine, leftEndLine, rightStartLine, rightEndLine) {
  const leftScrollTop = leftEditor.ace.getSession().getScrollTop();
  const rightScrollTop = rightEditor.ace.getSession().getScrollTop();
  acediff3.connectorYOffset = 1;
  // The 0.5 offsets put the 1px path on pixel boundaries; x runs 1px past
  // each gutter edge so the shape visually joins the editors.
  const topLeft = {
    x: -1,
    y: (leftStartLine * acediff3.lineHeight) - leftScrollTop + 0.5,
  };
  const topRight = {
    x: acediff3.gutterWidth + 1,
    y: (rightStartLine * acediff3.lineHeight) - rightScrollTop + 0.5,
  };
  const bottomLeft = {
    x: -1,
    y: (leftEndLine * acediff3.lineHeight) - leftScrollTop + acediff3.connectorYOffset + 0.5,
  };
  const bottomRight = {
    x: acediff3.gutterWidth + 1,
    y: (rightEndLine * acediff3.lineHeight) - rightScrollTop + acediff3.connectorYOffset + 0.5,
  };
  // Path: curve along the top, down the right edge, curve back along the
  // bottom, then up the left edge to close the shape.
  const topCurve = getCurve(topLeft.x, topLeft.y, topRight.x, topRight.y);
  const bottomCurve = getCurve(bottomRight.x, bottomRight.y, bottomLeft.x, bottomLeft.y);
  const rightEdge = `L${topRight.x},${topRight.y} ${bottomRight.x},${bottomRight.y}`;
  const leftEdge = `L${bottomLeft.x},${bottomLeft.y} ${topLeft.x},${topLeft.y}`;
  const d = `${topCurve} ${rightEdge} ${bottomCurve} ${leftEdge}`;
  const el = document.createElementNS(C.SVG_NS, 'path');
  el.setAttribute('d', d);
  el.setAttribute('class', acediff3.options.classes.connector);
  // diffs1 connectors live in gutter 1, diffs2 connectors in gutter 2.
  const gutterSVG = (leftEditor === acediff3.editors.left) ? acediff3.gutterSVG1 : acediff3.gutterSVG2;
  gutterSVG.appendChild(el);
}
/**
 * Add a copy arrow for one diff, if that direction is enabled and the diff
 * actually highlights one or more source lines (endLine > startLine).
 *
 * @param {Object} acediff3 - the AceDiff3 instance.
 * @param {Object} info - a simplified diff record with *StartLine/*EndLine.
 * @param {number} diffIndex - index into diffs1/diffs2, stored on the arrow.
 * @param {boolean} isLeftContainer - true for the left gutter (left → right
 *   copy arrows), false for the right gutter (right → left copy arrows).
 */
function addCopyArrows(acediff3, info, diffIndex, isLeftContainer) {
  if (isLeftContainer) {
    if (info.leftEndLine <= info.leftStartLine || !acediff3.options.left.copyLinkEnabled) {
      return;
    }
    acediff3.copyRightContainer.appendChild(createArrow({
      className: acediff3.options.classes.newCodeConnectorLink,
      topOffset: info.leftStartLine * acediff3.lineHeight,
      tooltip: 'Copy to right',
      diffIndex,
      arrowContent: acediff3.options.classes.newCodeConnectorLinkContent,
    }));
    return;
  }
  if (info.rightEndLine <= info.rightStartLine || !acediff3.options.right.copyLinkEnabled) {
    return;
  }
  acediff3.copyLeftContainer.appendChild(createArrow({
    className: acediff3.options.classes.deletedCodeConnectorLink,
    topOffset: info.rightStartLine * acediff3.lineHeight,
    tooltip: 'Copy to left',
    diffIndex,
    arrowContent: acediff3.options.classes.deletedCodeConnectorLinkContent,
  }));
}
// Pin both copy-arrow containers against the common editor's scroll offset
// so the arrows track their diff rows while the user scrolls.
function positionCopyContainers(acediff3) {
  const commonScrollTop = acediff3.editors.common.ace.getSession().getScrollTop();
  const pinned = `top: ${-commonScrollTop}px`;
  acediff3.copyRightContainer.style.cssText = pinned;
  acediff3.copyLeftContainer.style.cssText = pinned;
}
/**
// eslint-disable-next-line max-len
* This method takes the raw diffing info from the Google lib and returns a nice clean object of the following
* form:
* {
* leftStartLine:
* leftEndLine:
* rightStartLine:
* rightEndLine:
* }
*
* Ultimately, that's all the info we need to highlight the appropriate lines in the left + right editor, add the
* SVG connectors, and include the appropriate <<, >> arrows.
*
* Note: leftEndLine and rightEndLine are always the start of the NEXT line, so for a single line diff, there will
* be 1 separating the startLine and endLine values. So if leftStartLine === leftEndLine or rightStartLine ===
* rightEndLine, it means that new content from the other editor is being inserted and a single 1px line will be
* drawn.
*/
// Translate one raw diff-match-patch chunk into line ranges for both editors
// (see the JSDoc above for the shape of the returned object). The two
// branches are mirror images: DIFF_INSERT means the chunk's text lives in the
// left editor, DIFF_DELETE means it lives in the right editor.
function computeDiff(acediff3, diffType, leftEditor, rightEditor, offsetLeft, offsetRight, diffText) {
  let lineInfo = {};
  // this was added in to hack around an oddity with the Google lib. Sometimes it would include a newline
  // as the first char for a diff, other times not - and it would change when you were typing on-the-fly. This
  // is used to level things out so the diffs don't appear to shift around
  let newContentStartsWithNewline = /^\n/.test(diffText);
  if (diffType === C.DIFF_INSERT) {
    // pretty confident this returns the right stuff for the left editor: start & end line & char
    var info = getSingleDiffInfo(leftEditor, offsetLeft, diffText);
    // this is the ACTUAL undoctored current line in the other editor. It's always right. Doesn't mean it's
    // going to be used as the start line for the diff though.
    var currentLineOtherEditor = getLineForCharPosition(rightEditor, offsetRight);
    var numCharsOnLineOtherEditor = getCharsOnLine(rightEditor, currentLineOtherEditor);
    const numCharsOnLeftEditorStartLine = getCharsOnLine(leftEditor, info.startLine);
    var numCharsOnLine = getCharsOnLine(leftEditor, info.startLine);
    // this is necessary because if a new diff starts on the FIRST char of the left editor, the diff can come
    // back from google as being on the last char of the previous line so we need to bump it up one
    let rightStartLine = currentLineOtherEditor;
    if (numCharsOnLine === 0 && newContentStartsWithNewline) {
      newContentStartsWithNewline = false;
    }
    if (info.startChar === 0 && isLastChar(rightEditor, offsetRight, newContentStartsWithNewline)) {
      rightStartLine = currentLineOtherEditor + 1;
    }
    var sameLineInsert = info.startLine === info.endLine;
    // whether or not this diff is a plain INSERT into the other editor, or overwrites a line take a little work to
    // figure out. This feels like the hardest part of the entire script.
    var numRows = 0;
    if (
      // dense, but this accommodates two scenarios:
      // 1. where a completely fresh new line is being inserted in left editor, we want the line on right to stay a 1px line
      // 2. where a new character is inserted at the start of a newline on the left but the line contains other stuff,
      // we DO want to make it a full line
      (info.startChar > 0 || (sameLineInsert && diffText.length < numCharsOnLeftEditorStartLine))
      // if the right editor line was empty, it's ALWAYS a single line insert [not an OR above?]
      && numCharsOnLineOtherEditor > 0
      // if the text being inserted starts mid-line
      && (info.startChar < numCharsOnLeftEditorStartLine)) {
      numRows++;
    }
    lineInfo = {
      leftStartLine: info.startLine,
      leftEndLine: info.endLine + 1,
      rightStartLine,
      rightEndLine: rightStartLine + numRows,
    };
  } else {
    // DIFF_DELETE: the chunk's text lives in the right editor; same logic as
    // above with the roles of the two editors swapped.
    var info = getSingleDiffInfo(rightEditor, offsetRight, diffText);
    var currentLineOtherEditor = getLineForCharPosition(leftEditor, offsetLeft);
    var numCharsOnLineOtherEditor = getCharsOnLine(leftEditor, currentLineOtherEditor);
    const numCharsOnRightEditorStartLine = getCharsOnLine(rightEditor, info.startLine);
    var numCharsOnLine = getCharsOnLine(rightEditor, info.startLine);
    // this is necessary because if a new diff starts on the FIRST char of the left editor, the diff can come
    // back from google as being on the last char of the previous line so we need to bump it up one
    let leftStartLine = currentLineOtherEditor;
    if (numCharsOnLine === 0 && newContentStartsWithNewline) {
      newContentStartsWithNewline = false;
    }
    if (info.startChar === 0 && isLastChar(leftEditor, offsetLeft, newContentStartsWithNewline)) {
      leftStartLine = currentLineOtherEditor + 1;
    }
    var sameLineInsert = info.startLine === info.endLine;
    var numRows = 0;
    if (
      // dense, but this accommodates two scenarios:
      // 1. where a completely fresh new line is being inserted in left editor, we want the line on right to stay a 1px line
      // 2. where a new character is inserted at the start of a newline on the left but the line contains other stuff,
      // we DO want to make it a full line
      (info.startChar > 0 || (sameLineInsert && diffText.length < numCharsOnRightEditorStartLine))
      // if the right editor line was empty, it's ALWAYS a single line insert [not an OR above?]
      && numCharsOnLineOtherEditor > 0
      // if the text being inserted starts mid-line
      && (info.startChar < numCharsOnRightEditorStartLine)) {
      numRows++;
    }
    lineInfo = {
      leftStartLine,
      leftEndLine: leftStartLine + numRows,
      rightStartLine: info.startLine,
      rightEndLine: info.endLine + 1,
    };
  }
  return lineInfo;
}
// helper to return the startline, endline, startChar and endChar for a diff in a particular editor. Pretty
// fussy function
// Maps a character-offset diff (covering offset .. offset + diffString.length in the
// flattened document) onto 0-indexed line/char coordinates for `editor`, returning
// { startLine, startChar, endLine, endChar }. Three post-passes then nudge the raw
// result so that pure-newline boundaries don't highlight an extra line.
// NOTE(review): editor.lineLengths appears to already include each line's newline
// (no "+ 1" here, unlike getLineForCharPosition) — confirm against where it is built.
function getSingleDiffInfo(editor, offset, diffString) {
  const info = {
    startLine: 0,
    startChar: 0,
    endLine: 0,
    endChar: 0,
  };
  // absolute offset of the character just past the end of the diff
  const endCharNum = offset + diffString.length;
  let runningTotal = 0;
  let startLineSet = false;
  let endLineSet = false;
  editor.lineLengths.forEach((lineLength, lineIndex) => {
    runningTotal += lineLength;
    // first line whose cumulative length passes `offset` contains the diff start
    if (!startLineSet && offset < runningTotal) {
      info.startLine = lineIndex;
      info.startChar = offset - runningTotal + lineLength;
      startLineSet = true;
    }
    // first line whose cumulative length reaches `endCharNum` contains the diff end
    if (!endLineSet && endCharNum <= runningTotal) {
      info.endLine = lineIndex;
      info.endChar = endCharNum - runningTotal + lineLength;
      endLineSet = true;
    }
  });
  // if the start char is the final char on the line, it's a newline & we ignore it
  if (info.startChar > 0 && getCharsOnLine(editor, info.startLine) === info.startChar) {
    info.startLine++;
    info.startChar = 0;
  }
  // if the end char is the first char on the line, we don't want to highlight that extra line
  if (info.endChar === 0) {
    info.endLine--;
  }
  // a mid-line diff that ends with a newline still spills onto the following line
  const endsWithNewline = /\n$/.test(diffString);
  if (info.startChar > 0 && endsWithNewline) {
    info.endLine++;
  }
  return info;
}
// note that this and everything else in this script uses 0-indexed row numbers
// Number of characters on the given line of an editor.
function getCharsOnLine(editor, line) {
  const lineContent = getLine(editor, line);
  return lineContent.length;
}
// Return the 0-indexed line containing the given absolute character offset.
// Offsets are counted over the whole document, with one extra character per
// line for its newline. Offsets beyond the end of the document fall back to 0.
function getLineForCharPosition(editor, offsetChars) {
  const allLines = editor.ace.getSession().doc.getAllLines();
  let charsSeen = 0;
  let found = 0;
  let lineIndex = 0;
  while (lineIndex < allLines.length) {
    charsSeen += allLines[lineIndex].length + 1; // +1 needed for newline char
    if (offsetChars <= charsSeen) {
      found = lineIndex;
      break;
    }
    lineIndex += 1;
  }
  return found;
}
// Returns true when `char` is the absolute offset of the final character of
// some line in the editor (offsets include one newline char per line). When
// `startsWithNewline` is set, the comparison point is shifted back by one so a
// diff beginning with a newline is matched against the preceding position.
function isLastChar(editor, char, startsWithNewline) {
  const lines = editor.ace.getSession().doc.getAllLines();
  let runningTotal = 0;
  // BUG FIX: the function previously ended with `return isLastChar`, returning
  // the function object itself — which is always truthy — instead of a boolean.
  let isLast = false;
  for (let i = 0; i < lines.length; i += 1) {
    runningTotal += lines[i].length + 1; // +1 needed for newline char
    let comparison = runningTotal;
    if (startsWithNewline) {
      comparison -= 1;
    }
    if (char === comparison) {
      isLast = true;
      break;
    }
  }
  return isLast;
}
// Build the two gutter SVG canvases, sized to span the tallest of: left pane
// content, common pane content, right pane content, or the gutter DOM elements
// themselves, and append one SVG into each gutter element.
function createGutter(acediff3) {
  const gutter1 = document.getElementById(acediff3.options.classes.gutter1);
  const gutter2 = document.getElementById(acediff3.options.classes.gutter2);
  acediff3.gutterHeight = Math.max(gutter1.clientHeight, gutter2.clientHeight);
  acediff3.gutterWidth = Math.max(gutter1.clientWidth, gutter2.clientWidth);
  const height = Math.max(
    getTotalHeight(acediff3, C.EDITOR_LEFT),
    getTotalHeight(acediff3, C.EDITOR_RIGHT),
    getTotalHeight(acediff3, C.EDITOR_COMMON),
    acediff3.gutterHeight,
  );
  acediff3.gutterSVG1 = document.createElementNS(C.SVG_NS, 'svg');
  acediff3.gutterSVG1.setAttribute('width', acediff3.gutterWidth);
  acediff3.gutterSVG1.setAttribute('height', height);
  acediff3.gutterSVG2 = document.createElementNS(C.SVG_NS, 'svg');
  acediff3.gutterSVG2.setAttribute('width', acediff3.gutterWidth);
  acediff3.gutterSVG2.setAttribute('height', height);
  gutter1.appendChild(acediff3.gutterSVG1);
  gutter2.appendChild(acediff3.gutterSVG2);
}
// acediff3.editors.left.ace.getSession().getLength() * acediff3.lineHeight
// Total pixel height of the given editor's content (line count * line height).
function getTotalHeight(acediff3, editor) {
  let targetEditor;
  if (editor === C.EDITOR_COMMON) {
    targetEditor = acediff3.editors.common;
  } else if (editor === C.EDITOR_LEFT) {
    targetEditor = acediff3.editors.left;
  } else {
    targetEditor = acediff3.editors.right;
  }
  return targetEditor.ace.getSession().getLength() * acediff3.lineHeight;
}
// Create the two positioning containers that hold the copy-left / copy-right
// arrow elements: the copy-right container lives in gutter 1, the copy-left
// container in gutter 2.
function createCopyContainers(acediff3) {
  const classes = acediff3.options.classes;
  acediff3.copyRightContainer = document.createElement('div');
  acediff3.copyRightContainer.setAttribute('class', classes.copyRightContainer);
  document.getElementById(classes.gutter1).appendChild(acediff3.copyRightContainer);
  acediff3.copyLeftContainer = document.createElement('div');
  acediff3.copyLeftContainer.setAttribute('class', classes.copyLeftContainer);
  document.getElementById(classes.gutter2).appendChild(acediff3.copyLeftContainer);
}
// Throw away the current gutter SVGs and rebuild them at the current sizes.
function clearGutter(acediff3) {
  const classes = acediff3.options.classes;
  document.getElementById(classes.gutter1).removeChild(acediff3.gutterSVG1);
  document.getElementById(classes.gutter2).removeChild(acediff3.gutterSVG2);
  createGutter(acediff3);
}
// Empty both copy-arrow containers so decorate() can repopulate them.
function clearArrows(acediff3) {
  acediff3.copyLeftContainer.innerHTML = '';
  acediff3.copyRightContainer.innerHTML = '';
}
/*
 * This combines multiple rows where, say, line 1 => line 1, line 2 => line 2, line 3-4 => line 3. That could be
 * reduced to a single connector line 1=4 => line 1-3
 */
function simplifyDiffs(acediff3, diffs) {
  const grouped = [];
  const specific = acediff3.options.diffGranularity === C.DIFF_GRANULARITY_SPECIFIC;
  // adjacency test: in "specific" mode edges must overlap; otherwise being
  // one line apart is still close enough to merge
  function closeEnough(gap) {
    return specific ? gap < 1 : gap <= 1;
  }
  for (let d = 0; d < diffs.length; d += 1) {
    const diff = diffs[d];
    // the very first diff always starts its own group
    if (d === 0) {
      grouped.push(diff);
      continue;
    }
    // try to fold this diff into an existing group whose edges it touches on
    // both the left and right side; otherwise it starts a new group
    let merged = false;
    for (let g = 0; g < grouped.length; g += 1) {
      const group = grouped[g];
      if (closeEnough(Math.abs(diff.leftStartLine - group.leftEndLine))
        && closeEnough(Math.abs(diff.rightStartLine - group.rightEndLine))) {
        // widen the group to cover this diff's start + end lines as well
        group.leftStartLine = Math.min(diff.leftStartLine, group.leftStartLine);
        group.rightStartLine = Math.min(diff.rightStartLine, group.rightStartLine);
        group.leftEndLine = Math.max(diff.leftEndLine, group.leftEndLine);
        group.rightEndLine = Math.max(diff.rightEndLine, group.rightEndLine);
        merged = true;
        break;
      }
    }
    if (!merged) {
      grouped.push(diff);
    }
  }
  // drop diffs that collapsed to a single line on both editors
  return grouped.filter((diff) => !(diff.leftStartLine === diff.leftEndLine
    && diff.rightStartLine === diff.rightEndLine));
}
// Redraw all visual diff decorations: the highlighted line ranges in each
// pane, the connector shapes in the gutters, and the copy arrows.
function decorate(acediff3) {
  clearGutter(acediff3);
  clearArrows(acediff3);
  // left <-> common diffs (drawn through gutter 1)
  for (const [diffIndex, info] of acediff3.diffs1.entries()) {
    if (!acediff3.options.showDiffs) {
      continue;
    }
    showDiff(acediff3, C.EDITOR_LEFT, info.leftStartLine, info.leftEndLine, acediff3.options.classes.diff);
    showDiff(acediff3, C.EDITOR_COMMON, info.rightStartLine, info.rightEndLine, acediff3.options.classes.diff);
    if (acediff3.options.showConnectors) {
      addConnector(acediff3, acediff3.editors.left, acediff3.editors.common, info.leftStartLine, info.leftEndLine, info.rightStartLine, info.rightEndLine);
    }
    addCopyArrows(acediff3, info, diffIndex, true);
  }
  // common <-> right diffs (drawn through gutter 2)
  for (const [diffIndex, info] of acediff3.diffs2.entries()) {
    if (!acediff3.options.showDiffs) {
      continue;
    }
    showDiff(acediff3, C.EDITOR_COMMON, info.leftStartLine, info.leftEndLine, acediff3.options.classes.diff);
    showDiff(acediff3, C.EDITOR_RIGHT, info.rightStartLine, info.rightEndLine, acediff3.options.classes.diff);
    if (acediff3.options.showConnectors) {
      addConnector(acediff3, acediff3.editors.common, acediff3.editors.right, info.leftStartLine, info.leftEndLine, info.rightStartLine, info.rightEndLine);
    }
    addCopyArrows(acediff3, info, diffIndex, false);
  }
}
module.exports = AceDiff3;
<file_sep>/README.md
# Ace-diff3
This is a wrapper for [Ace Editor](http://ace.c9.io/) to provide a 3-panel diffing/merging tool that visualizes differences in three documents and allows users to copy changes between them.
It's based on a fork of [Ace Diff](https://github.com/ace-diff/ace-diff) and built on top of [google-diff-match-patch](https://code.google.com/p/google-diff-match-patch/) library. That lib handles the hard part: the computation of the document diffs.
Ace-diff 3 just visualizes that information as line-diffs in the editors.
## How to Install
```bash
yarn && yarn build
```
Copy the files from ```dist/``` into your project.
### HTML
```html
<div class="acediff3"></div>
```
### JavaScript
Here's an example of how you'd instantiate AceDiff3.
```js
const differ = new AceDiff3({
  ace: window.ace, // Your Ace Editor instance
element: '.acediff3',
left: {
content: 'your local file content here',
},
common: {
content: 'your base file content here',
},
right: {
content: 'your incoming file content here',
},
});
```
Everything else is the same as Ace Diff - See the [Ace Diff Source](https://github.com/ace-diff/ace-diff) for information.
## License
MIT.
<file_sep>/test/visuals/getCurve.test.js
const { expect } = require('chai');
const getCurve = require('../../src/visuals/getCurve');
// getCurve(startX, startY, endX, endY) builds the SVG path string for a gutter
// connector: a cubic Bezier from (startX, startY) to (endX, endY) whose control
// points sit at the horizontal midpoint (here x = 2 for endpoints x = 1 and x = 3).
describe('getCurve', () => {
  it('should export getCurve object', () => {
    expect(getCurve).to.exist;
  });
  it('should return correct curve string', () => {
    expect(getCurve(1, 2, 3, 4)).to.equal('M 1 2 C 2,2 2,4 3,4');
  });
});
| 2ef234ada7839930f14f2f0cfc20742ccb0f985b | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | slimjimsoftware/ace-diff3 | 7d28eb4573f3e6d60f220d2b29153017ea3fd401 | 881125b90ed3d045bac1a7d14953490807c45f97 |
refs/heads/master | <file_sep>const express = require('express');
const ensure = require('connect-ensure-login');
const protRoutes = express.Router();
const User = require('../models/user.js');
const Phone = require('../models/phone.js');
const bcrypt = require('bcrypt');
const multer = require('multer');
const multerS3 = require('multer-s3');
const aws = require('aws-sdk');
// Storage backend for uploaded phone pictures, chosen at startup:
// - production: files go to an S3 bucket via multer-s3, and the web address of
//   an upload is the S3 object URL (file.location).
// - otherwise: files go to ./public/uploads on local disk, served under /uploads.
// getUploadWebAddress(file) maps a multer file object to the URL stored in the DB.
let uploads;
let getUploadWebAddress;
if(process.env.NODE_ENV === "production"){
    getUploadWebAddress = (file) => {
        // multer-s3 sets file.location to the object's public URL
        return file.location;
    };
    // credentials and bucket name come from the environment
    const s3 = new aws.S3({
        accessKeyId: process.env.ACCESS_KEY_ID,
        secretAccessKey: process.env.SECRET_ACCESS_KEY
    });
    uploads = multer({
        storage:multerS3({
            s3: s3,
            bucket: process.env.BUCKET_NAME,
            contentType: multerS3.AUTO_CONTENT_TYPE
        })
    });
}else{
    getUploadWebAddress = (file) => {
        // file.filename is the random name multer gave the file on disk
        return `/uploads/${file.filename}`;
    };
    uploads = multer({
        dest: __dirname + '/../public/uploads/'
    });
}
// GET /phones/new: render the "list a new phone" form (login required).
protRoutes.get('/phones/new', ensure.ensureLoggedIn(), (req, res, next) => {
    res.render('phones/new');
});
// POST /phones: create a new listing for the logged-in user.
// Note: multer's uploads.single('picture') matches <input type="file" name="picture">
// in phones/new.ejs; when a file was sent, req.file describes it.
protRoutes.post('/phones', ensure.ensureLoggedIn(), uploads.single('picture'), (req, res, next) => {
    // Build the document once. Previously the whole object literal was
    // duplicated in both branches of an if/else that differed only in the
    // imageUrl field (and phoneInfo was pointlessly initialised to '').
    const phoneInfo = {
        brand: req.body.brand,
        model: req.body.model,
        condition: req.body.condition,
        memory: req.body.memory,
        color: req.body.color,
        price: req.body.price,
        provider: req.body.provider,
        unlocked: req.body.unlocked,
        additionalDetails: req.body.additionalDetails,
        owner: req.user._id // populated by passport for authenticated requests
    };
    if (req.file !== undefined) {
        // req.file is populated by multer; getUploadWebAddress turns it into
        // the URL we persist (S3 location in production, /uploads/... locally)
        phoneInfo.imageUrl = getUploadWebAddress(req.file);
    }
    const newPhone = new Phone(phoneInfo);
    newPhone.save((err) => {
        if (err) {
            // includes the mongoose validation error when imageUrl is missing
            next(err);
            return;
        }
        req.flash('success', 'Your phone has been added.');
        res.redirect("/phones");
    });
});
// GET /phones/my-phones: list only the phones owned by the signed-in user.
protRoutes.get('/phones/my-phones', ensure.ensureLoggedIn(), (req, res, next) => {
    Phone.find({ owner: req.user._id }, (err, ownedPhones) => {
        if (err) {
            return next(err);
        }
        res.render('phones/my-phones', { phones: ownedPhones });
    });
});
// GET /phones/:id/edit: render the edit form pre-filled with the phone's
// current attributes so the user can keep or update each field.
// CONSISTENCY FIX: login is now required, matching the POST /phones/:id route
// that applies the edit (this form was previously reachable anonymously,
// unlike every other listing-management route in this router).
protRoutes.get('/phones/:id/edit', ensure.ensureLoggedIn(), (req, res, next) => {
    const phoneId = req.params.id;
    Phone.findById(phoneId, (err, phone) => {
        if (err) {
            next(err);
            return;
        }
        res.render('phones/edit', { phone: phone });
    });
});
// POST /phones/:id: apply edits to an existing listing. Mirrors the create
// route: the picture is optional and only replaces the stored imageUrl when a
// new file was actually uploaded.
protRoutes.post('/phones/:id', ensure.ensureLoggedIn(), uploads.single('picture'), (req, res, next) => {
    const phoneId = req.params.id;
    const phoneUpdates = {
        brand: req.body.brand,
        model: req.body.model,
        condition: req.body.condition,
        memory: req.body.memory,
        color: req.body.color,
        price: req.body.price,
        provider: req.body.provider,
        unlocked: req.body.unlocked,
        additionalDetails: req.body.additionalDetails,
        owner: req.user._id // populated by passport for authenticated requests
    };
    // BUG FIX: the old check was `typeof req.file === undefined`, which is
    // always false because typeof yields a string ("undefined"), never the
    // value undefined. As a result getUploadWebAddress ran on an undefined
    // file whenever the form was submitted without a new picture — the
    // "file undefined" bug noted in the README.
    if (req.file !== undefined) {
        phoneUpdates.imageUrl = getUploadWebAddress(req.file);
    }
    Phone.findByIdAndUpdate(phoneId, phoneUpdates, (err, phone) => {
        if (err) {
            next(err);
            return;
        }
        res.redirect('/phones/my-phones');
    });
});
// POST /phones/:id/delete: remove a listing, then return to the owner's list.
protRoutes.post('/phones/:id/delete', ensure.ensureLoggedIn(), (req, res, next) => {
    Phone.findByIdAndRemove(req.params.id, (err, removedPhone) => {
        if (err) {
            return next(err);
        }
        res.redirect('/phones/my-phones');
    });
});
// GET /dashboard: the signed-in user's profile/dashboard landing page.
protRoutes.get('/dashboard', ensure.ensureLoggedIn(), (req, res, next) => {
    res.render('dashboard/profile');
});
// GET /dashboard/profile/:id/edit: render the profile-editing form.
protRoutes.get('/dashboard/profile/:id/edit', ensure.ensureLoggedIn(), (req, res, next) => {
    res.render('dashboard/edit-profile');
});
// POST /dashboard/profile/:id: update the logged-in user's profile. A new
// password is salted + hashed only when the form actually supplied one;
// otherwise the stored hash is left untouched.
protRoutes.post('/dashboard/profile/:id', ensure.ensureLoggedIn(), (req, res, next) => {
    const userId = req.params.id;
    let userUpdates;
    if (req.body.newPassword) {
        const salt = bcrypt.genSaltSync(10);
        // BUG FIX: this previously hashed an undefined `password` variable,
        // which threw a ReferenceError whenever a new password was submitted;
        // the value to hash is the submitted req.body.newPassword.
        const hashPass = bcrypt.hashSync(req.body.newPassword, salt);
        userUpdates = {
            firstName: req.body.firstName,
            lastName: req.body.lastName,
            email: req.body.email,
            password: hashPass,
        };
    } else {
        userUpdates = {
            firstName: req.body.firstName,
            lastName: req.body.lastName,
            email: req.body.email
        };
    }
    User.findByIdAndUpdate(userId, userUpdates, (err, user) => {
        if (err) {
            next(err);
            return;
        }
        res.redirect('/dashboard');
    });
});
// GET /phones/my-phones/search: search from the "my phones" page using the
// mongoose-regex-search plugin's Model.search(); results render into the
// my-phones view. An empty term just bounces back to the unfiltered page.
// CONSISTENCY FIX: login is now required like the other /phones/my-phones
// routes. NOTE(review): Phone.search() matches across all listings, not only
// the current user's — confirm whether that is intended.
protRoutes.get('/phones/my-phones/search', ensure.ensureLoggedIn(), (req, res, next) => {
    const searchTerm = req.query.searchTerm;
    if (!searchTerm) {
        res.redirect('/phones/my-phones');
        return;
    }
    Phone.search(searchTerm, (err, results) => {
        // (removed a leftover debug console.log of the full result set)
        if (err) {
            next(err);
            return;
        }
        res.render('phones/my-phones', {
            phones: results
        });
    });
});
module.exports = protRoutes;
<file_sep>#Pristine Marketplace: a marketplace to turn that forgotten device into cash.
This project is my first attempt at a full stack web application using the M.E.N. Stack (Angular was not yet covered).
Inspiration behind developing the Pristine Marketplace:
Chances are you or someone you know has a smart phone (iPhone, Samsung, etc.) that
is not longer being used and is collecting dust somewhere. The Pristine Marketplace
allows users to list their device to a local market; meaning customers are located
in and around the area.
Buyers are able to:
- Create listings
- View both their own and the whole inventory of phones.
- Update their listings.
- Delete their own listings.
Sellers are able to:
- View all listings
- Filter the listings via a Search feature.
- Inquire further about any listing that interests them.
Authentication and Authorization is enforced by a local strategy to provide for
a safe and vetted marketplace.
Technologies Used: MongoDB, Express.js, Node.js, HTML, CSS, jQuery, and DOM Manipulation.
Notable NPM Packages: mongoose-regex-search.
Will come back to refactor:
HTML / CSS
Will Implement:
Stripe API
Known Bug: "file undefined" error when an edit is submitted without uploading a new image.
<file_sep>const express = require('express');
const authRoutes = express.Router();
const User = require('../models/user.js');
const passport = require('passport');
const bcrypt = require('bcrypt');
const flash = require('connect-flash');
// GET /signup: show the registration form.
authRoutes.get('/signup', (req, res, next) => {
    res.render('auth/signup');
});
// POST /signup: register a new user — validate the input, reject duplicate
// emails, bcrypt-hash the password, and persist the user document.
authRoutes.post('/signup', (req, res, next) => {
    const email = req.body.email;
    const password = req.body.password;
    // both fields are required
    if (email === '' || password === '') {
        res.render('auth/signup', {
            errorMessage: "Please fill out both an email and password"
        });
        return;
    }
    // reject the signup when the email is already registered
    User.findOne({ email: email }, { email: 1 }, (err, foundUser) => {
        if (err) {
            next(err);
            return;
        }
        if (foundUser !== null) {
            // typo fix in the user-facing message: was "exits"
            res.render('auth/signup', {
                errorMessage: 'The email already exists'
            });
            return;
        }
        // never store the plain-text password: salt + hash it first
        const salt = bcrypt.genSaltSync(10);
        const hashPass = bcrypt.hashSync(password, salt);
        const userInfo = {
            firstName: req.body.firstName,
            lastName: req.body.lastName,
            email: email,
            // store the bcrypt hash computed above, never the raw password
            password: hashPass
        };
        const theUser = new User(userInfo);
        theUser.save((err) => {
            if (err) {
                res.render('auth/signup', {
                    errorMessage: "This was odd... There was a problem saving. Try again later."
                });
                return;
            }
            req.flash('success', 'You have been registered. Try logging in');
            res.redirect('/');
        });
    });
});
//Stays the same
// authRoutes.get('/login', (req,res,next)=>{
// res.render('auth/login-view.ejs', {errorMessage: req.flash('error')});
// });
// GET /login: show the login form, surfacing any flashed auth error message.
authRoutes.get('/login', (req, res, next) => {
    res.render('auth/login', { errorMessage: req.flash('error') });
});
// POST /login: authentication is delegated entirely to passport's "local"
// strategy; this route supplies only the strategy's configuration object.
authRoutes.post("/login",
    passport.authenticate("local", {
        successReturnToOrRedirect: "/", // return to the page the user was on before login (unlike successRedirect, which always goes home)
        failureRedirect: "/login",
        failureFlash: true, // flash the strategy's error message on a failed login
        successFlash: 'You have been logged in, user', // flash message shown on success
        passReqToCallback: true
    }));
// GET /logout: passport's req.logout() clears the login state for any
// strategy (local, Google, Facebook, ...). It wipes the session's user data
// without destroying the cookie itself.
authRoutes.get('/logout', (req, res, next) => {
    req.logout();
    req.flash('success', 'You have logged out.');
    res.redirect('/');
});
module.exports = authRoutes;
<file_sep>const mongoose = require('mongoose');
const Schema = mongoose.Schema;
// Register mongoose-regex-search globally so models gain a .search() helper
// over the fields flagged `searchable: true` below.
mongoose.plugin(require('mongoose-regex-search'));
// One second-hand phone listing. The descriptive fields (price included) are
// stored as strings and marked indexed + searchable so site-wide search can
// match any of them.
const phoneSchema = new Schema({
    brand: {type: String, index: true, searchable: true, required: true},
    model: {type: String, index: true, searchable: true, required: true},
    condition: {type: String, index: true, searchable: true, required: true},
    memory: {type: String,index: true, searchable: true, required: true},
    color: {type: String, index: true, searchable: true, required: true},
    price: {type: String, index: true, searchable: true, required: true},
    provider: {type: String,index: true, searchable: true, required: true},
    unlocked:{type: String, index: true, searchable: true, required: true},
    additionalDetails: {type: String, required: true},
    // web address of the uploaded picture (S3 URL or /uploads/... path)
    imageUrl: {type: String, required: true},
    // reference to the User document that created the listing
    owner: {type: Schema.Types.ObjectId, ref:'User'}
});
// adds createdAt / updatedAt fields automatically
phoneSchema.set('timestamps', true);
const Phone = mongoose.model('Phone', phoneSchema);
module.exports = Phone;
<file_sep>const express = require('express');
const Phone = require('../models/phone.js');
const shopRoutes = express.Router();
// GET /phones: public catalogue page listing every phone document.
shopRoutes.get('/phones', (req, res, next) => {
    Phone.find((err, phoneDocs) => {
        if (err) {
            return next(err);
        }
        res.render('phones/browse-all', { phones: phoneDocs });
    });
});
// GET /phones/search: public search across all listings using the
// mongoose-regex-search plugin's Model.search(). An empty search term just
// bounces back to the full catalogue.
shopRoutes.get('/phones/search', (req, res, next) => {
    const searchTerm = req.query.searchTerm;
    if (!searchTerm) {
        res.redirect('/phones');
        return;
    }
    Phone.search(searchTerm, (err, results) => {
        // (removed a leftover debug console.log of the full result set)
        if (err) {
            next(err);
            return;
        }
        res.render('phones/results', {
            searchTerm: searchTerm,
            phones: results
        });
    });
});
module.exports = shopRoutes;
| ea35590d9cf8bd3bde6e6881156f0caf196c9a23 | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | dsoraluz/pristine-wireless | 7aec7ba09f39531dbe550714d07c82a060331180 | a42568928fb22e0dea44b24f944b8cc32136e163 |
refs/heads/master | <repo_name>SteKelehan/GroceryComparison<file_sep>/ocado.py
import requests
import sys
import urllib.request
URL = 'https://www.ocado.com/webshop/api/v1/search?searchTerm='
def get_product_details(product):
    """Query the Ocado search API and return the raw list of product entries.

    Args:
        product: Free-text search term, e.g. ``"oat milk"``.

    Returns:
        list: The "fop" dicts from the first section of the API response.

    Raises:
        requests.RequestException: if the HTTP request fails.
        KeyError, IndexError: if the response JSON has an unexpected shape.
    """
    from urllib.parse import quote
    # quote() percent-encodes every reserved character, not just spaces as the
    # previous str.replace(' ', '%20') did, so terms containing '&', '+', etc.
    # no longer corrupt the query string.
    url = URL + quote(product)
    payload = requests.get(url).json()
    return payload["mainFopCollection"]["sections"][0]["fops"]
def make_product_list(product):
    """Search Ocado for *product* and return a name -> price-details mapping.

    The result maps each product's display name to a dict holding its current
    price ("price") and unit price ("unit price"), as reported by the API.
    """
    catalogue = {}
    for entry in get_product_details(product):
        details = entry["product"]
        catalogue[details["name"]] = {
            "price": details["price"]["current"],
            "unit price": details["price"]["unit"],
        }
    return catalogue
<file_sep>/amazonScrape.py
# import requests
# from bs4 import BeautifulSoup
# import pandas as pd
# import sys
# import configparser
# URLStart = 'https://www.amazon.co.uk/s?k='
# URLEnd = '&i=amazonfresh&ref=nb_sb_noss_1'
# config = configparser.ConfigParser()
# config.read('config.ini')
# headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0", "Accept-Encoding":"gzip, deflate", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "DNT":"1","Connection":"close", "Upgrade-Insecure-Requests":"1"}
# session = requests.Session()
# session.headers = config['Amazon']['header2']
# # URL = config['Amazon']['URLSignIn']
# resp = session.get(config['Amazon']['URLSignIn'])
# html = resp.text
# soup = BeautifulSoup(html , 'lxml')
# data = {}
# form = soup.find('form', {'name': 'signIn'})
# for field in form.find_all('input'):
# try:
# data[field['name']] = field['value']
# except:
# pass
# data[u'email'] = config['Amazon']['User']
# data[u'password'] = config['Amazon']['Password']
# post_resp = session.post(config['Amazon']['URLSignIn'], data = data)
# post_soup = BeautifulSoup(post_resp.content , 'lxml')
# if post_soup.find_all('title')[0].text == 'Your Account':
# print('Login Successfull')
# else:
# print('Login Failed')
# def get_product_details(product):
# cookies = {'enwiki_session': '17ab96bd8ffbe8ca58a78657a918558'}
# product = product.replace(' ', '+')
# url = URLStart + product + URLEnd
# data = requests.get(url, headers=headers, cookies=cookies)
# content = data.content
# soup = BeautifulSoup(content)
# print(soup)
# return data
# get_product_details('oat milk')
import requests
import json
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from product import Product
URL = "http://www.amazon.co.uk/"
# Launch a headless, incognito Chrome session for scraping.
options = webdriver.ChromeOptions()
options.add_argument('--ignore-certificate-errors')
options.add_argument('--incognito')
options.add_argument('--headless')
# NOTE(review): the chromedriver path is hard-coded to one developer's machine;
# consider making it configurable.
driver = webdriver.Chrome("/Users/Ste/Downloads/chromedriver", chrome_options=options)
# Ask the user for a search term, type it into Amazon's search box and submit.
search_term = str(input("What are you looking for?\n:"))
driver.get(URL)
element = driver.find_element_by_xpath('//*[@id="twotabsearchtextbox"]')
element.send_keys(search_term)
element.send_keys(Keys.ENTER)
# Accumulates one Product per scraped search result.
products = []
def convert_price_toNumber(price):
    """Parse an Amazon price string into a float.

    Amazon renders pounds and pence in separate DOM elements, so the raw
    ``element.text`` joins them with a newline (e.g. ``"£5\\n99"`` for £5.99),
    and thousands are comma-separated (e.g. ``"£1,234"``).

    Args:
        price: Raw text containing a "£" followed by the amount.

    Returns:
        float: The numeric price.

    Raises:
        IndexError: if the text contains no "£".
        ValueError: if the remaining text is not numeric.
    """
    amount = price.split("£")[1]
    # Pounds/pence arrive on separate lines; rejoin them with a decimal point.
    parts = amount.split("\n")
    if len(parts) > 1:
        amount = parts[0] + "." + parts[1]
    # BUG FIX: the old code used bare `except:` blocks whose bodies created an
    # Exception instance without raising it, and its comma handling
    # (split(",")[0] + split(",")[1]) silently dropped every digit after a
    # second comma, mangling prices of £1,000,000 and up. replace() strips all
    # thousands separators correctly.
    amount = amount.replace(",", "")
    return float(amount)
# Walk the first two result pages, extracting (name, price, previous price,
# link) for each product card into the global `products` list.
page = 1
while True:
    # Pages after the first are reached by appending &page=N to the results URL.
    if page != 1:
        try:
            driver.get(driver.current_url + "&page=" + str(page))
        except:
            # navigation failed (e.g. browser gone) — stop scraping
            break
    # Each matched node is one result container on the page.
    for i in driver.find_elements_by_xpath('//*[@id="search"]/div[1]/div[2]/div/span[4]/div[1]'):
        # counter indexes the h2/name elements in parallel with the price links
        counter = 0
        for element in i.find_elements_by_xpath('//div/div/div[2]/div[2]/div/div[2]/div[1]/div/div[1]/div/div/a'):
            should_add = True
            name = ""
            price = ""
            prev_price = ""
            link = ""
            try:
                name = i.find_elements_by_tag_name('h2')[counter].text
                price = convert_price_toNumber(element.find_element_by_class_name('a-price').text)
                link = i.find_elements_by_xpath('//h2/a')[counter].get_attribute("href")
                try:
                    # a-text-price holds the pre-discount price; absent when
                    # the item is not on sale
                    prev_price = convert_price_toNumber(element.find_element_by_class_name('a-text-price').text)
                except:
                    # NOTE(review): Exception() creates and discards an
                    # instance — it neither raises nor logs; likely meant `pass`
                    Exception()
                    prev_price = price
            except:
                # any missing element invalidates this card; skip it below
                print("exception")
                should_add = False
            product = Product(name, price, prev_price, link)
            if should_add:
                products.append(product)
            counter = counter + 1
    page = page +1
    # hard stop after scraping pages 1 and 2
    if page == 3:
        break
print(page)
print("done")
| f8f25558c9f5b7677e1cac928d24c09d758b571f | [
"Python"
] | 2 | Python | SteKelehan/GroceryComparison | 7b42f4661d68abc0b00258c726b182cbacc2a61f | e252d9a3d311ca4b236833ae4d15f7a9c1389074 |
refs/heads/master | <file_sep>var personArr = [
{name:'王港', src:'./src/img/3.png', des:'颈椎不好',sex:'m'},
{name:'刘莹', src:'./src/img/5.png', des:'我是谁',sex:'f'},
{name:'王秀莹', src:'./src/img/4.png', des:'我很好看',sex:'f'},
{name:'刘金雷', src:'./src/img/1.png', des:'你没有见过陌生的脸',sex:'m'},
{name:'刘飞翔', src:'./src/img/2.png', des:'瓜皮刘',sex:'m'}
];
var oUl = document.getElementsByTagName('ul')[0]; // list the person cards are rendered into
var oSearch = document.getElementsByClassName('search-box')[0]; // text input for name filtering
var oP = document.getElementsByTagName('p')[0]; // container of the sex-filter <span> tabs
// Render the given array of person objects into the <ul> as card markup.
function renderList(arr) {
    var str = '';
    arr.forEach(function (ele, index) {
        // str += '<li>
        //             <img src="./src/img/1.png" alt="">
        //             <p class="username">邓哥</p>
        //             <p class="des">妻妾成群</p>
        //         </li>'// (example of the markup each item becomes, built as one string)
        str += '<li>\
                    <img src=' + ele.src + ' alt="">\
                    <p class="username">' + ele.name +'</p>\
                    <p class="des">' + ele.des +'</p>\
                </li>'// a trailing backslash continues the string literal onto the next line
    })
    oUl.innerHTML = str;
}
// oUl.innerText = '<li></li><li></li><li></li><li></li><li></li>';
// innerText would insert the markup as literal text;
// innerHTML parses it into real elements, which is why innerHTML is used above.
renderList(personArr);
// Shared filter state: every input control writes its latest value here, and
// the combined filter (lastFilterFunc) reads from it on each re-render.
// BUG FIX: `state` was never declared anywhere in this file, so the first
// keystroke threw a ReferenceError; initialise it with the defaults the UI
// starts in (empty search text, 'a' = all sexes).
var state = { text: '', sex: 'a' };
// Re-filter and re-render whenever the search text changes.
oSearch.oninput = function () {
    state.text = this.value;
    renderList(lastFilterFunc(personArr));
};
// Return the members of `arr` whose .name contains `text` as a substring.
// (String#indexOf returns the match position, or -1 when absent; an empty
// `text` matches everything since indexOf('') is 0.)
function filterText(text, arr) {
    return arr.filter(function (ele) {
        // simplified from the redundant `cond ? true : false` form
        return ele.name.indexOf(text) !== -1;
    });
}
// Event delegation: a single click listener on the tab container. Bubbling
// means clicks on the gaps also land here, so we first check the event source.
oP.addEventListener('click', function (e) {
    if(e.target.nodeName == 'SPAN') {
        // move the 'active' highlight from the previous tab to the clicked one
        document.getElementsByClassName('active')[0].className = '';
        e.target.className = 'active';
        // read the tab's custom "sex" attribute into the shared filter state
        state.sex = e.target.getAttribute('sex');
        // getAttribute is required (not e.target.sex) because "sex" is a
        // hand-added attribute on the element, not a DOM property
        renderList(lastFilterFunc(personArr));
        // re-rendering is always the final step, so the list reflects the new filter
    }
})
// Filter `arr` by sex code: 'a' means "all" (the array is returned untouched),
// otherwise only entries whose .sex matches are kept.
function filterSex(sex, arr) {
    if (sex === 'a') {
        return arr;
    }
    return arr.filter(function (ele) {
        return ele.sex === sex;
    });
}
// Combine several filters so they apply together. `obj` maps a state key to
// its filter function (e.g. {text: filterText, sex: filterSex}); the returned
// function pipes `arr` through every filter in turn, feeding each one the
// current value from the shared global `state`. Using an object makes it easy
// to bolt on new filter types later.
function unionFilterFunc(obj) {
    return function (arr) {
        var lastArr = arr;
        for(var prop in obj) {
            // prop is the state key (e.g. 'text'); obj[prop] is its filter
            lastArr = obj[prop](state[prop], lastArr);
        }
        return lastArr;
    }
}
var lastFilterFunc = unionFilterFunc({text: filterText, sex: filterSex});
// to support another filter, add one more {stateKey: filterFunction} pair above
//百度联想词_________防抖的知识点之后网络会讲
//********************************++++++++++++++++++++++++++++++ */
//打开思路:
// 订阅模式
// A minimal publish/subscribe store (Redux-style demo):
// - getState() reads the current state object
// - dispatch({type, value}) writes state[type] = value, then notifies subscribers
// - subscribe(fn) registers fn to run after every dispatch
function createStore(initState) {
    // use the supplied initial state, or fall back to an empty object
    var state = initState || {};
    // every callback registered through subscribe()
    var listeners = [];
    function getState() {
        return state;
    }
    function dispatch(action) {
        state[action.type] = action.value;
        // notify every subscriber that the state changed
        listeners.forEach(function (listener) {
            listener();
        });
    }
    function subscribe(func) {
        // BUG FIX: this function body was empty, so subscribers were never
        // remembered and dispatch() had nothing to notify
        listeners.push(func);
    }
    return {
        getState: getState,
        dispatch: dispatch,
        // BUG FIX: the key was misspelled "subscrib", so Store.subscribe(...)
        // threw a TypeError
        subscribe: subscribe
    };
}
// Demo usage: build a store with three keys, register one subscriber, and
// dispatch a few updates, logging the state after each one.
var Store = createStore({
    text: '',
    sex: 'a',
    age: 0
});
// subscriber intended to run after every dispatch
function show () {
    console.log('sub');
}
Store.subscribe(show);
console.log(Store.getState());
Store.dispatch({type: 'text', value: '刘'});
console.log(Store.getState());
Store.dispatch({type: 'sex', value: 'm'});
console.log(Store.getState());
Store.dispatch({type: 'text', value: '王'});
"JavaScript"
] | 1 | JavaScript | arancat/text-js-someDemos | f5c6aa1d64e71125b541cae288b02a8a98619d32 | aa566ca07370eb58978cfb035d5b27d998539446 |
refs/heads/master | <file_sep>package com.bridgelabz.selenium.base;
import io.github.bonigarcia.wdm.WebDriverManager;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Parameters;
/*@Desc - Created Base class
* used before test and after test annotations
* user parameterization for getting url value from testng.xml file
* imported chrome driver
* */
/**
 * Base test fixture for the LinkedIn suite: starts a maximised Chrome session
 * before each &lt;test&gt; and closes it afterwards. The target URL is injected
 * from testng.xml through the {@code @Parameters} annotation.
 */
public class BaseClass {
    public static WebDriver driver;

    @Parameters({"url"})
    @BeforeTest
    public void setUp(String url) throws InterruptedException {
        // Resolve and install the matching chromedriver binary automatically.
        WebDriverManager.chromedriver().setup();
        driver = new ChromeDriver();
        // BUG FIX: the "url" parameter was declared on the annotation but the
        // method accepted no argument, so the value configured in testng.xml
        // was ignored and the address was hard-coded to linkedin.com.
        driver.get(url);
        driver.manage().window().maximize();
        Thread.sleep(5000);
    }

    @AfterTest
    public void tearDown() {
        driver.close();
    }
}
<file_sep>package com.selenium;
import com.bridgelabz.selenium.base.BaseClass;
import com.bridgelabz.selenium.pages.Login;
import com.bridgelabz.selenium.utility.DataProvider;
import org.testng.Assert;
import org.testng.annotations.Test;
/*called dataProvider class
* created method login Test
* where we initialize username and password as string
* done validation using Title of the page
* */
/**
 * Data-driven login test: DataProvider "test1" supplies (username, password)
 * pairs; each pair is logged in through the Login page object and the landing
 * page title is asserted to confirm a successful login.
 */
public class LoginTest_DataProvider extends BaseClass {
    @Test(dataProvider="test1",dataProviderClass=DataProvider.class)
    public void LoginTest(String UserName, String PassWord) throws InterruptedException {
        // perform the login through the page object
        Login login = new Login();
        login.login(UserName,PassWord);
        // a successful login lands on the LinkedIn feed page
        String actualTitle = driver.getTitle();
        String expectedTitle = "Feed | LinkedIn";
        Assert.assertEquals(actualTitle,expectedTitle);
        System.out.println("Test is Passed");
        System.out.println(UserName+" | "+PassWord);
    }
}
| b3c931f19f049b1779fe960ded15e70c6131ea54 | [
"Java"
] | 2 | Java | vaishnavibirle/LinkedIn_DDD_Framework | 021303527ec57faf7b27bc9a9587e86aebcef133 | 989c9f01568331d8184802e7c93e626b48865185 |
refs/heads/master | <file_sep>## This tells us what to use as a base. The node docker containers are good, so might as well use them
FROM node:9-slim
## Make a directory for our app
RUN mkdir -p /opt/app/public
## Add our code. I like to be explicit and not use wildcards, since it's easy to get the node_modules
## directory which may need to build things which won't work on different architectures (i.e. mac to linux)
ADD package.json yarn.lock index.js /opt/app/
ADD public /opt/app/public/
## Set the default directory we're using on our container
WORKDIR /opt/app
## Run npm install
RUN yarn install
## Expose a port that we can access our web app on
EXPOSE 1313
## Tell our app how to start when we run the docker container. This has to be an array of a command and its arguments
CMD ["yarn", "dev"]
<file_sep>$(document).ready(() => {
    // Handle the form submit: validate the requested throw count, then
    // render one table row per simulated rock-paper-scissors round.
    $('#rps').submit(function (e) {
        e.preventDefault();
        const count = parseInt($('#count').val());
        // parseInt yields NaN for non-numeric input, which is falsy.
        if (!count || count < 0) {
            $('#error').text('Please make the count a positive number.');
            return;
        }
        // Soft cap to keep the rendered table manageable.
        if (count > 500) {
            $('#error').text(`... Do you really need ${count} rps throws?`);
            return;
        }
        resetRPS();
        $('#output').append('<tr><th>#</th><th>Status</th><th>You</th><th>Opponent</th></tr>');
        for (let i = 0; i < count; i++) {
            const [status, first, second] = rps();
            // The lower-cased status doubles as the row's CSS class.
            $('#output').append(`<tr class=${status.toLowerCase()}><td>${i + 1}</td><td>${status}</td><td>${first}</td><td>${second}</td></tr>`);
        }
    });
});
// Clear any previous error message and empty the results table.
function resetRPS() {
    const errorBox = $('#error');
    const outputTable = $('#output');
    errorBox.text('');
    outputTable.html('');
}
// Play one round: pick two random throws (0 = rock, 1 = paper,
// 2 = scissors) and score them from the first player's perspective.
// Returns [status, playerThrowName, opponentThrowName].
function rps() {
    const names = ['rock', 'paper', 'scissors'];
    const mine = Math.floor(Math.random() * 3);
    const theirs = Math.floor(Math.random() * 3);
    return [compare(mine, theirs), names[mine], names[theirs]];
}
// Score a round of rock-paper-scissors from the first player's perspective.
// Throws are encoded 0 = rock, 1 = paper, 2 = scissors; each throw beats the
// previous one cyclically (paper > rock, scissors > paper, rock > scissors).
// Bug fix: the original compared raw indices (first > second), which wrongly
// scored rock (0) vs scissors (2) as a loss.
function compare(first, second) {
    if (first === second) {
        return "Tie";
    }
    // (first - second + 3) % 3 === 1 exactly when `first` beats `second`.
    return (first - second + 3) % 3 === 1 ? "Win" : "Loss";
}
| a543d196de1e2236f350b6bf013a3b96c3e2c840 | [
"JavaScript",
"Dockerfile"
] | 2 | Dockerfile | slooker/questpocalypseNow | f20ea8cdfdb1796d336abbd0354716d599dec5b9 | 18537619f0a35c525e1fa60649db125ce189e041 |
refs/heads/master | <repo_name>BigUnit/CSC360<file_sep>/A3/apps/test06.c
#include "../io/File.h"
/* Driver for test 06: run the file-system consistency check (see README). */
int main(){
    file_check();
    printf("File Check Complete.\n");
    return 0;
}<file_sep>/A3/README.md
<NAME>
V00876934
Assignment 3: File Systems
For my implementation of a file system I decided to go with the UNIX file system that has:
- 256 inodes
- 4096 blocks
- 512 bytes per blocks
- 2mb virtual disk
Reserved block:
0 -> Superblock
1 -> Free Block Vector
2 -> inode address map
3-9 -> Unused but Reserved
Each inode will be given its own block and contains the addresses
for all the blocks that that file/directory points to including the use of
single and double indirect blocks.
The virtual disk (vdiskAPI.c) contains 3 functions, read_block , write_block
and create_disk. create_disk is only used in the initialization of the disk to
create the file that will be the virtual disk.
File.c will contain all the functions necessary to make/remove directories,
read/write/remove a file from disk, initialize the disk and all the functions
that interact with the metadata. As well there is a file_check function that can
be run to determine/correct any discrepancies for inodes/blocks be marked as used
if they are not meant to be and vice versa.
The apps folder contains test files that show that the file system works
properly, and a script to run all the test files.
The key trade-offs of this implementation are:
Pros
- Less prone to data loss as there is no cache to lose
- Doesn't require a cleaner
- No partial overwriting of files
- Very simple
Cons
- Each inode takes a block which wastes space available on disk
- Much slower as it doesn't make use of a cache
- Not as practical as LFS due to being slower and wasteful
Testing Parameters:
Test 01: Initializes Disk, results can be viewed in init_disk.txt
Test 02: Creates Following Directories ("~" is the root), hexdump can be viewed in directories_disk.txt
~/new
~/new/test
~/new/test/test2
~/csc360
~/csc360/assignments
Test 03 : Write the following files to vdisk, hexdump can be viewed in files_disk.txt
~/SmallFile -> SmallFile points to small.txt, a file that uses only direct blocks
~/csc360/MedFile -> MedFile points to med.txt, a file that uses direct and single indirect blocks
~/csc360/assignments/LargeFile -> LargeFile points to large.txt, a file that uses direct, single and double indirect blocks
Test 04 : Read the files that were written to the vdisk in test 3 to files on the real disk
Correctness tested by computing diff between ("_read" suffix being the ones read from vdisk and written to real disk)
small.txt small_read.txt
med.txt med_read.txt
large.txt large_read.txt
Test 05: Removes all files and directories created in previous test, restoring disk to initialized state
Result is stored in removed_disk.txt;
removed_disk.txt and init_disk.txt are then used in diff to show that the removal correct
Test 06: Disk runs the file check on the vdisk and stores the check log into check_log.txt
<file_sep>/A2/Nathan_Marcotte_CSC360_A2/pc_sem_uthread.c
/* <NAME>
* CSC 360 Spring 2019
* V00876934
* pc_sem_uthread.c
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include "uthread.h"
#include "uthread_sem.h"
#define MAX_ITEMS 10
const int NUM_ITERATIONS = 200;
const int NUM_CONSUMERS = 2;
const int NUM_PRODUCERS = 2;
int histogram [MAX_ITEMS+1]; // histogram [i] == # of times list stored i items
int items = 0;               // current number of produced-but-unconsumed items
uthread_sem_t lock;          // binary semaphore guarding items/histogram
uthread_sem_t can_prod;      // counts remaining capacity (starts at MAX_ITEMS)
uthread_sem_t can_cons;      // counts items available to consume (starts at 0)
/*
 * Producer thread body: performs NUM_ITERATIONS productions.  Each
 * iteration waits for capacity (can_prod), increments `items` under the
 * mutex `lock`, records the new level in the histogram, and then signals
 * a consumer (can_cons).
 */
void* producer (void* v) {
  for (int i=0; i<NUM_ITERATIONS; i++) {
    uthread_sem_wait(can_prod); // wait until there is room to produce
    uthread_sem_wait(lock);     // enter critical section
    items++;
    histogram[items]++;
    assert(items<=MAX_ITEMS);
    uthread_sem_signal(lock);   // leave critical section
    uthread_sem_signal(can_cons); // signal that an item is available
  }
  return NULL;
}
/*
 * Consumer thread body: performs NUM_ITERATIONS consumptions.  Each
 * iteration waits for an available item (can_cons), decrements `items`
 * under the mutex `lock`, records the new level, and then signals a
 * producer (can_prod) that capacity has been freed.
 */
void* consumer (void* v) {
  for (int i=0; i<NUM_ITERATIONS; i++) {
    uthread_sem_wait(can_cons); // wait until an item is available
    uthread_sem_wait(lock);     // enter critical section
    items--;
    histogram[items]++;
    assert(items>=0);
    uthread_sem_signal(lock);   // leave critical section
    uthread_sem_signal(can_prod); // signal that capacity was freed
  }
  return NULL;
}
/*
 * Create the semaphores, start NUM_PRODUCERS producer and NUM_CONSUMERS
 * consumer threads, join them, and print the histogram of buffer levels.
 * The final assert checks every iteration of every thread was recorded.
 */
int main (int argc, char** argv) {
  uthread_t t[4];
  uthread_init (4);
  lock = uthread_sem_create(1);              // mutex: one holder at a time
  can_prod = uthread_sem_create(MAX_ITEMS);  // initial capacity
  can_cons = uthread_sem_create(0);          // nothing to consume yet
  for(int i = 0;i<NUM_PRODUCERS;i++){
    t[i]=uthread_create(producer,NULL);
  }
  for(int i = NUM_PRODUCERS;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
    t[i]=uthread_create(consumer,NULL);
  }
  for(int i = 0;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
    uthread_join(t[i],0);
  }
  printf ("items value histogram:\n");
  int sum=0;
  for (int i = 0; i <= MAX_ITEMS; i++) {
    printf (" items=%d, %d times\n", i, histogram [i]);
    sum += histogram [i];
  }
  assert (sum == sizeof (t) / sizeof (uthread_t) * NUM_ITERATIONS);
}
<file_sep>/A3/disk/vdiskAPI.c
#include "vdiskAPI.h"
/*
 * Read one BLOCK_SIZE-byte block from the virtual disk into `buf`
 * (caller allocates at least BLOCK_SIZE bytes).
 *
 * Returns 0 on success, -1 if `block` is out of range.
 *
 * Fix: the bounds check accepted block == NUM_BLOCKS; valid block numbers
 * are 0 .. NUM_BLOCKS-1, so that value read past the end of the disk file.
 */
int read_block(int block, BYTE_t* buf){
    if(block >= NUM_BLOCKS || block < 0){return -1;}
    FILE* disk = fopen(VDISK,"rb");
    assert(disk);
    fseek(disk,block*BLOCK_SIZE,SEEK_SET);
    fread(buf,BLOCK_SIZE,1,disk); // buffer is allocated by the caller
    assert(!fclose(disk));
    return 0;
}
/*
 * Write the BLOCK_SIZE bytes in `data` to block `block` of the virtual
 * disk.
 *
 * Returns 0 on success, -1 if `block` is out of range.
 *
 * Fix: the bounds check accepted block == NUM_BLOCKS; valid block numbers
 * are 0 .. NUM_BLOCKS-1, so that value wrote past the end of the disk.
 */
int write_block(int block, BYTE_t* data){
    if(block >= NUM_BLOCKS || block < 0){return -1;}
    FILE* disk = fopen(VDISK,"rb+");
    assert(disk);
    fseek(disk,block*BLOCK_SIZE,SEEK_SET);
    fwrite(data,BLOCK_SIZE,1,disk); // data must already be BLOCK_SIZE bytes
    assert(!fclose(disk));
    return 0;
}
/* Create (or truncate) the virtual-disk backing file.  Used only when
 * initialising the file system. */
void create_disk(void){
    FILE* disk = fopen(VDISK,"wb+");
    assert(disk);
    assert(!fclose(disk));
} // creates disk <file_sep>/A3/apps/test02.c
#include "../io/File.h"
/* Driver for test 02: build the directory tree described in the README. */
int main(){
    make_dir("~/new");
    make_dir("~/new/test");
    make_dir("~/new/test/test2");
    make_dir("~/csc360");
    make_dir("~/csc360/assignments");
    return 0;
}<file_sep>/A3/disk/vdiskAPI.h
#ifndef vdiskAPI_H
#define vdiskAPI_H
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
/* Virtual-disk geometry: 4096 blocks of 512 bytes = 2 MB backing file. */
#define VDISK "../disk/vdisk"
#define BLOCK_SIZE 512
#define NUM_BLOCKS 4096
/* Raw byte type used for all block buffers. */
typedef unsigned char BYTE_t;
/* Read block `block` into caller-allocated `buf`; 0 on success, -1 if out of range. */
int read_block(int block, BYTE_t* buf);
/* Write the BLOCK_SIZE bytes in `data` to block `block`; 0 on success, -1 if out of range. */
int write_block(int block, BYTE_t* data);
/* Create/truncate the backing file (initialisation only). */
void create_disk(void);
#endif <file_sep>/README.md
# CSC360 - University Of Victoria
Operating Systems
<file_sep>/A3/io/File.c
#include "File.h"
/*
 * Initialise the file system on a fresh virtual disk: zero every block,
 * write the superblock (block 0) and the free-block vector (block 1),
 * then create the root directory.  Blocks 0-9 are reserved (superblock,
 * FBV, inode map, spares) and start marked as in use in the FBV.
 */
void InitLLFS(void){
    create_disk();
    BYTE_t* buffer = (BYTE_t *)calloc(BLOCK_SIZE, sizeof(BYTE_t));
    for(int i=0;i<NUM_BLOCKS;i++){ write_block(i,buffer); } // zero the whole disk
    BYTE_t* superblock = (BYTE_t *)calloc(BLOCK_SIZE, sizeof(BYTE_t));
    superblock [0] = 0x53;
    superblock [1] = 0x4B;
    superblock [2] = 0x52;
    superblock [3] = 0x54; // magic number: ASCII "SKRT"
    superblock [4] = 0x00;
    superblock [5] = 0x00;
    superblock [6] = 0x10;
    superblock [7] = 0x00; // block count: 0x00001000 = 4096 blocks
    superblock [8] = 0x00;
    superblock [9] = 0x00;
    superblock [10] = 0x01;
    superblock [11] = 0x00; // max inodes: 0x00000100 = 256 (only 256 unique uint8_t inode IDs)
    write_block(0,superblock);
    BYTE_t* FBV = (BYTE_t *)calloc(BLOCK_SIZE, sizeof(BYTE_t));
    // Free-block vector: one bit per block, MSB first; 1 = free, 0 = used.
    FBV[0] = 0b00000000;
    FBV[1] = 0b00111111; // mark reserved blocks 0-9 as not available
    for(int i = 2;i<BLOCK_SIZE;i++){
        FBV[i] = 0b11111111;
    }
    write_block(1,FBV);
    free(FBV);
    free(buffer);
    free(superblock);
    make_root_dir();
}
/*
 * Mark `block` as in use: clear its bit in the free-block vector
 * (block 1).  Bit (block % 8) of byte (block / 8), counted from the
 * most significant bit; 1 = free, 0 = used.
 */
void close_block(int block){
    BYTE_t* buf = (BYTE_t *)calloc(BLOCK_SIZE, sizeof(BYTE_t));
    read_block(1,buf);
    BYTE_t old = buf[block/8];
    BYTE_t new;
    BYTE_t mask;
    int bit_shift = block%8;
    mask = ~(0b10000000 >> (bit_shift)); // 0 at this block's bit, 1 everywhere else
    new = (old & mask);                  // clear the bit, preserve the rest
    buf[block/8] = new;
    write_block(1,buf);
    free(buf);
}
/*
 * Mark `block` as free again: set its bit in the free-block vector
 * (block 1).  Inverse of close_block().
 */
void open_block(int block){
    BYTE_t* buf = (BYTE_t *)calloc(BLOCK_SIZE, sizeof(BYTE_t));
    read_block(1,buf);
    BYTE_t old = buf[block/8];
    BYTE_t new;
    BYTE_t mask;
    int bit_shift = block%8;
    mask = 0b10000000 >> (bit_shift); // 1 only at this block's bit
    new = (old | mask);               // set the bit, preserve the rest
    buf[block/8] = new;
    write_block(1,buf);
    free(buf);
}
/*
 * Scan the free-block vector for the lowest-numbered free block (bit
 * value 1, most-significant bit first within each byte).
 *
 * Returns the block number, or -1 if no block is free.  The block is NOT
 * marked used here; callers follow up with close_block().
 */
int find_block(){
    BYTE_t* FBV = (BYTE_t*)malloc(BLOCK_SIZE * sizeof(BYTE_t));
    read_block(1,FBV);
    int block_num = -1;
    for(int i = 0;i<BLOCK_SIZE;i++){
        if(FBV[i]){
            // First set bit (MSB first) inside the first non-zero byte.
            if (FBV[i] & 0b10000000){ //isolate value at this position in binary number
                block_num = (8*i);
            } else if (FBV[i] & 0b01000000) {
                block_num = ((8*i) + 1);
            } else if (FBV[i] & 0b00100000) {
                block_num = ((8*i) + 2);
            } else if (FBV[i] & 0b00010000) {
                block_num = ((8*i) + 3);
            } else if (FBV[i] & 0b00001000) {
                block_num = ((8*i) + 4);
            } else if (FBV[i] & 0b00000100) {
                block_num = ((8*i) + 5);
            } else if (FBV[i] & 0b00000010) {
                block_num = ((8*i) + 6);
            } else if (FBV[i] & 0b00000001) {
                block_num = ((8*i) + 7);
            }
            break;
        }
    }
    free(FBV);
    return block_num;
}
/*
 * Record that `inode_num` is in use by storing its block address in the
 * inode map (block 2): entry i is a big-endian 16-bit block address at
 * byte offset 2*i.  A zero entry means the inode is free.
 */
void close_inode(int inode_num, int block_address){
    assert(!(inode_num<0 || inode_num >= MAX_INODES));
    assert(!(block_address<0 || block_address >= NUM_BLOCKS));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    BYTE_t first = (block_address & 0xFF00) >> 8;
    BYTE_t second = (block_address & 0x00FF);
    read_block(2,buf);
    buf[(inode_num*2)] = first;
    buf[(inode_num*2)+1] = second;
    write_block(2,buf);
    free(buf);
}
/* Free `inode_num` by zeroing its two-byte entry in the inode map (block 2). */
void open_inode(int inode_num){
    assert(!(inode_num<0 || inode_num >= MAX_INODES));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    read_block(2,buf);
    buf[(inode_num*2)] = 0x00;
    buf[(inode_num*2)+1] = 0x00;
    write_block(2,buf);
    free(buf);
}
/*
 * Find the lowest-numbered free inode by scanning the inode address map
 * (blocks 2 and 3).  An inode is free when its two-byte map entry is
 * 0x0000.  Entry 0 of block 2 is skipped (presumably reserved for the
 * root inode — TODO confirm against ROOT_INODE_ID in File.h).
 *
 * Returns the inode number, or -1 if every inode is in use.
 *
 * Fixes two defects in the original: the scratch buffer leaked on the
 * success path (returned without free), and the function fell off the
 * end without a return value when no inode was free (undefined
 * behaviour).
 */
int find_inode(){
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    int found = -1;
    for(int i = 0; i < 2 && found < 0; i++){
        read_block((2+i),buf);
        for(int j = 0; j < BLOCK_SIZE; j+=2){
            if(i==0 && j==0){ continue; } // skip inode 0
            if(buf[j] == 0x00 && buf[(j+1)] == 0x00){
                found = (256*i) + (j/2);
                break;
            }
        }
    }
    free(buf);
    return found;
}
/*
 * Serialise an inode into a raw block buffer (on-disk layout, all fields
 * big-endian): bytes 0-3 size, 4-7 flags, 8-27 the ten 16-bit direct
 * block addresses, 28-29 single-indirect block, 30-31 double-indirect
 * block; the rest of the block is zeroed.
 */
void inode_into_buffer(inode_t* inode, BYTE_t* buffer){
    buffer[0] = (inode->size & 0xFF000000) >> 24;
    buffer[1] = (inode->size & 0x00FF0000) >> 16;
    buffer[2] = (inode->size & 0x0000FF00) >> 8;
    buffer[3] = (inode->size & 0x000000FF);
    buffer[4] = (inode->flags & 0xFF000000) >> 24;
    buffer[5] = (inode->flags & 0x00FF0000) >> 16;
    buffer[6] = (inode->flags & 0x0000FF00) >> 8;
    buffer[7] = (inode->flags & 0x000000FF);
    int cur = 8;
    for(int i = 0; i<10; i++){
        buffer[2*i + cur] = (inode->blocks[i] & 0xFF00) >> 8;
        buffer[2*i + (cur+1)] = (inode->blocks[i] & 0x00FF);
    }
    buffer[28] = (inode->single_ind & 0xFF00) >> 8;
    buffer[29] = (inode->single_ind & 0x00FF);
    buffer[30] = (inode->double_ind & 0xFF00) >> 8;
    buffer[31] = (inode->double_ind & 0x00FF);
    for(int i = 32; i<BLOCK_SIZE; i++){
        buffer[i] = 0x00000000;
    }
}
/* Deserialise an inode from a raw block buffer; inverse of inode_into_buffer. */
void buffer_into_inode(inode_t* inode, BYTE_t* buffer){
    inode->size = (buffer[0]<< 24) | (buffer[1]<< 16) | (buffer[2]<< 8) | (buffer[3]) ;
    inode->flags = (buffer[4]<< 24) | (buffer[5]<< 16) | (buffer[6]<< 8) | (buffer[7]) ;
    int cur = 8;
    for(int i = 0; i<10; i++){ inode->blocks[i] = (buffer[2*i + cur]<< 8) | (buffer[2*i + (cur+1)]) ; }
    inode->single_ind = ( ( buffer[28]<< 8 ) | (buffer[29]) );
    inode->double_ind = ( ( buffer[30]<< 8 ) | (buffer[31]) );
}
/*
 * Serialise a directory into a raw block buffer: 16 fixed-size entries,
 * each 1-byte inode ID followed by a 31-byte filename (16 * 32 = 512
 * bytes, exactly one block).
 */
void dir_into_buffer(dir_t* dir, BYTE_t* buffer){
    int dir_entry_size = sizeof(dir_entry_t);
    for(int i = 0;i<16;i++){
        buffer[(i*dir_entry_size)] = dir->entries[i].inode_ID;
        for(int j = 1; j<32; j++){
            buffer[ j + (i*dir_entry_size) ] = dir->entries[i].filename[j-1];
        }
    }
}
/* Deserialise a directory from a raw block buffer; inverse of dir_into_buffer. */
void buffer_into_dir(dir_t* dir, BYTE_t* buffer){
    int dir_entry_size = sizeof(dir_entry_t);
    for(int i = 0; i<16; i++){
        dir->entries[i].inode_ID = buffer[(i*dir_entry_size)];
        for(int j = 1; j<32; j++){
            dir->entries[i].filename[j-1] = buffer[ j+ (i*dir_entry_size) ] ;
        }
    }
}
int get_inode_address(int inode_num){
if(inode_num<0 || inode_num >= NUM_BLOCKS){return -1;}
BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
BYTE_t first ;
BYTE_t second ;
read_block(2,buf);
first = buf[(inode_num*2)] ;
second = buf[(inode_num*2)+ 1];
free(buf);
int address = (first<<8) | (second) ;
return address;
}
/*
 * Remove the empty directory named by `path` (e.g. "~/a/b"): walk the
 * path component by component from the root, unlink the entry from its
 * parent directory, zero the directory's inode block and data block on
 * disk, and release both blocks (FBV) and the inode (map).  Asserts
 * that the directory holds no entries beyond itself and its parent.
 */
void remove_dir (BYTE_t* path){
    inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    dir_t* parent = (dir_t*)calloc(1,sizeof(dir_t));
    // strtok mutates its input, so tokenize a private copy of the path.
    BYTE_t new_path [MAX_PATH_LEN];
    strncpy(new_path,path,MAX_PATH_LEN);
    BYTE_t* tok;
    const char* delim = "/";
    tok = strtok(new_path,delim);
    BYTE_t* tokens [4]; // change back to size 4
    int path_len = 0;
    while( tok != NULL ) {
        tokens[path_len] = tok;
        path_len++;
        tok = strtok(NULL, delim);
    }
    // Start the walk at the root directory.
    int parent_dir_block = ROOT_DIR_BLOCK;
    int parent_inode_ID = ROOT_INODE_ID;
    int parent_inode_address = ROOT_INODE_BLOCK;
    read_block(parent_inode_address,buf);
    buffer_into_inode(in,buf);
    assert(in->flags==1);
    parent_dir_block = in->blocks[0];
    read_block(parent_dir_block,buf);
    buffer_into_dir(parent,buf);
    for(int i = 1;i<path_len-1;i++){ // reads through intermediate directories on path
        for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
            if(!(strncmp(tokens[i],parent->entries[j].filename,FILENAME_LEN))){
                parent_inode_ID=parent->entries[j].inode_ID;
                parent_inode_address = get_inode_address(parent_inode_ID);
                read_block(parent_inode_address,buf);
                buffer_into_inode(in,buf);
                assert(in->flags==1);
                parent_dir_block = in->blocks[0];
                read_block(parent_dir_block,buf);
                buffer_into_dir(parent,buf);
                break;
            }
        }
    }
    // Find the target entry in the parent, blank it, and load the
    // directory being removed.
    dir_t* rm_dir = (dir_t*)calloc(1,sizeof(dir_t));
    int dir_inode_ID ;
    int dir_inode_address;
    int dir_block;
    for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
        if(!(strncmp(tokens[path_len-1],parent->entries[j].filename,FILENAME_LEN))){
            dir_inode_ID=parent->entries[j].inode_ID;
            dir_inode_address = get_inode_address(dir_inode_ID);
            strncpy(parent->entries[j].filename,"",FILENAME_LEN);
            parent->entries[j].inode_ID = 0;
            read_block(dir_inode_address,buf);
            buffer_into_inode(in,buf);
            assert(in->flags==1);
            dir_block = in->blocks[0];
            read_block(dir_block,buf);
            buffer_into_dir(rm_dir,buf);
            break;
        }
    }
    for(int i = 2; i<MAX_DIR_ENTRIES; i++){ assert(rm_dir->entries[i].inode_ID==0); } // assert dir has no entries beyond itself and parent
    // Persist the parent, wipe the removed directory's blocks on disk,
    // then release its blocks and inode.
    dir_into_buffer(parent,buf);
    write_block(parent_dir_block,buf);
    BYTE_t* wipe = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    write_block(dir_inode_address,wipe);
    write_block(dir_block,wipe);
    open_inode(dir_inode_ID);
    open_block(dir_inode_address);
    open_block(dir_block);
    free(in);
    free(buf);
    free(parent);
    free(wipe);
    free(rm_dir);
}
/*
 * Create a new directory at `path` (e.g. "~/a/b" creates "b" inside the
 * existing "~/a"): walk the path from the root, allocate an inode block
 * and a data block for the new directory, seed it with its "self" and
 * "parent" entries, and link it into the parent's first free slot.
 */
void make_dir (BYTE_t* path){
    inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    dir_t* parent = (dir_t*)calloc(1,sizeof(dir_t));
    // strtok mutates its input, so tokenize a private copy of the path.
    BYTE_t new_path [MAX_PATH_LEN];
    strncpy(new_path,path,MAX_PATH_LEN);
    BYTE_t* tok;
    const char* delim = "/";
    tok = strtok(new_path,delim);
    BYTE_t* tokens [4]; // change back to size 4
    int path_len = 0;
    while( tok != NULL ) {
        tokens[path_len] = tok;
        path_len++;
        tok = strtok(NULL, delim);
    }
    // Start the walk at the root directory.
    int parent_dir_block = ROOT_DIR_BLOCK;
    int parent_inode_ID = ROOT_INODE_ID;
    int parent_inode_address = ROOT_INODE_BLOCK;
    read_block(parent_inode_address,buf);
    buffer_into_inode(in,buf);
    assert(in->flags==1);
    parent_dir_block = in->blocks[0];
    read_block(parent_dir_block,buf);
    buffer_into_dir(parent,buf);
    for(int i = 1;i<path_len-1;i++){ // reads through intermediate directories on path
        for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
            if(!(strncmp(tokens[i],parent->entries[j].filename,FILENAME_LEN))){
                parent_inode_ID=parent->entries[j].inode_ID;
                parent_inode_address = get_inode_address(parent_inode_ID);
                read_block(parent_inode_address,buf);
                buffer_into_inode(in,buf);
                assert(in->flags==1);
                parent_dir_block = in->blocks[0];
                read_block(parent_dir_block,buf);
                buffer_into_dir(parent,buf);
                break;
            }
        }
    }
    // Allocate the new directory's inode and data block.
    inode_t* new_dir_inode = (inode_t*)calloc(1,sizeof(inode_t));
    dir_t* new_dir = (dir_t*)calloc(1,sizeof(dir_t));
    int inode_num = find_inode();
    int inode_block = find_block();
    close_block(inode_block);
    close_inode(inode_num, inode_block);
    int new_dir_block = find_block();
    close_block(new_dir_block);
    new_dir_inode->size = 0;
    new_dir_inode->flags = 1; // flags==1 marks a directory inode
    new_dir_inode->blocks [0] = new_dir_block;
    new_dir->entries[0].inode_ID = (uint8_t)inode_num;
    strncpy(new_dir->entries[0].filename, tokens[path_len-1], FILENAME_LEN); //first entry is itself
    new_dir->entries[1].inode_ID = parent->entries[0].inode_ID;
    strncpy(new_dir->entries[1].filename, parent->entries[0].filename, FILENAME_LEN); // second entry is parent directory
    // Link the new directory into the parent's first free slot.
    int dir_opening ;
    for(dir_opening=2;dir_opening<MAX_DIR_ENTRIES;dir_opening++){
        if(!strcmp(parent->entries[dir_opening].filename,"") ){break;};
    }
    assert(dir_opening<MAX_DIR_ENTRIES);
    parent->entries[dir_opening].inode_ID = (uint8_t)inode_num;
    strncpy(parent->entries[dir_opening].filename, tokens[path_len-1] , FILENAME_LEN);
    // Persist the new inode, the new directory block, and the parent.
    inode_into_buffer(new_dir_inode,buf);
    write_block(inode_block,buf);
    dir_into_buffer(new_dir,buf);
    write_block(new_dir_block,buf);
    dir_into_buffer(parent,buf);
    write_block(parent_dir_block,buf);
    free(new_dir_inode);
    free(new_dir);
    free(in);
    free(parent);
    free(buf);
}
/*
 * Create the root directory "~" during initialisation: allocate its
 * inode block and data block, and write a directory whose "self" and
 * "parent" entries both point at the root itself.
 */
void make_root_dir (){
    inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    dir_t* root = (dir_t*)calloc(1,sizeof(dir_t));
    int inode_num = find_inode();
    int inode_block = find_block();
    close_block(inode_block);
    close_inode(inode_num, inode_block);
    int root_block = find_block();
    close_block(root_block);
    in->size = 0;
    in->flags = 1; // directory inode
    in->blocks [0] = root_block;
    root->entries[0].inode_ID = (uint8_t)inode_num;
    root->entries[0].filename[0] = '~';
    root->entries[0].filename[1] = '\0';
    root->entries[1].inode_ID = root->entries[0].inode_ID;
    strncpy(root->entries[1].filename,root->entries[0].filename,FILENAME_LEN); // root's parent directory is itself, 31 is max filename len
    inode_into_buffer(in,buf);
    write_block(inode_block,buf);
    dir_into_buffer(root,buf);
    write_block(root_block,buf);
    free(in);
    free(root);
    free(buf);
}
/* Debug helper: dump an inode's fields to stdout (testing code). */
void print_inode(inode_t* inode){
    printf("%" PRIu32 "\n", inode->size);
    printf("%" PRIu32 "\n", inode->flags);
    int idx = 0;
    while(idx < 10){
        printf("%" PRIu16 " ", inode->blocks[idx]);
        idx++;
    }
    printf("\n");
    printf("%" PRIu16 "\n", inode->single_ind);
    printf("%" PRIu16 "\n", inode->double_ind);
}
/*
 * Debug helper: dump the inode fields stored in a raw 512-byte block
 * buffer (testing code).  Prints size, flags and the ten direct block
 * addresses, mirroring print_inode.
 *
 * Fixes the original, which stored the 16-bit block addresses into an
 * 8-bit BYTE_t array (truncating them) and never printed them at all.
 */
void print_buf(BYTE_t* buffer){
    printf("%u \n", ((buffer[0]<< 24) | (buffer[1]<< 16) | (buffer[2]<< 8) | (buffer[3])) );
    printf("%u \n", ((buffer[4]<< 24) | (buffer[5]<< 16) | (buffer[6]<< 8) | (buffer[7])) );
    int cur = 8; // direct block addresses start at byte 8
    for(int i = 0; i<10; i++){
        unsigned int addr = (buffer[2*i + cur]<< 8) | (buffer[2*i + (cur+1)]);
        printf("%u ", addr);
    }
    printf("\n");
} //testing code
/*
 * Copy the contents of the open FILE* `file` onto the vdisk as a new
 * file at `path`: walk the directory path, allocate an inode, record the
 * file in the parent directory, then write the data block by block.
 * Layout: up to 10 direct blocks; blocks 11-266 through one
 * single-indirect block (256 big-endian 16-bit addresses); beyond that
 * through a double-indirect block of single-indirect blocks.  The final
 * partial block is zero-padded.
 */
void write_file(BYTE_t* path, FILE* file){
    inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    dir_t* parent = (dir_t*)calloc(1,sizeof(dir_t));
    // strtok mutates its input, so tokenize a private copy of the path.
    BYTE_t new_path [MAX_PATH_LEN];
    strncpy(new_path,path,MAX_PATH_LEN);
    BYTE_t* tok;
    const char* delim = "/";
    tok = strtok(new_path,delim);
    BYTE_t* tokens [5];
    int path_len = 0;
    while( tok != NULL ) {
        tokens[path_len] = tok;
        path_len++;
        tok = strtok(NULL, delim);
    }
    // Walk from the root to the parent directory of the new file.
    int parent_dir_block = ROOT_DIR_BLOCK;
    int parent_inode_ID = ROOT_INODE_ID;
    int parent_inode_address = ROOT_INODE_BLOCK;
    read_block(parent_inode_address,buf);
    buffer_into_inode(in,buf);
    assert(in->flags==1);
    parent_dir_block = in->blocks[0];
    read_block(parent_dir_block,buf);
    buffer_into_dir(parent,buf);
    for(int i = 1;i<path_len-1;i++){ // reads through intermediate directories on path
        for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
            if(!(strncmp(tokens[i],parent->entries[j].filename,FILENAME_LEN))){
                parent_inode_ID=parent->entries[j].inode_ID;
                parent_inode_address = get_inode_address(parent_inode_ID);
                read_block(parent_inode_address,buf);
                buffer_into_inode(in,buf);
                assert(in->flags==1);
                parent_dir_block = in->blocks[0];
                read_block(parent_dir_block,buf);
                buffer_into_dir(parent,buf);
                break;
            }
        }
    }
    // Find a free slot in the parent directory for the new file.
    int dir_opening ;
    for(dir_opening=2;dir_opening<MAX_DIR_ENTRIES;dir_opening++){
        if(!strcmp(parent->entries[dir_opening].filename,"") ){break;};
    }
    assert(dir_opening<MAX_DIR_ENTRIES);
    // Measure the source file and work out how many blocks it needs.
    fseek(file,0,SEEK_END);
    int len = ftell(file);
    fseek(file,0,SEEK_SET); // RETURN FP TO START
    int full_blocks = len/512;
    int extra_bytes = len%512;
    int blocks_needed = full_blocks+(extra_bytes!=0);
    // Allocate the file's inode and link it into the parent directory.
    inode_t* file_inode = (inode_t*)calloc(1,sizeof(inode_t));
    int file_inode_ID = find_inode();
    int file_inode_block = find_block();
    close_block(file_inode_block);
    close_inode(file_inode_ID,file_inode_block);
    file_inode->size = len;
    file_inode->flags = 0; // flags==0 marks a regular file
    BYTE_t* double_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    BYTE_t* single_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    parent->entries[dir_opening].inode_ID = (uint8_t)file_inode_ID;
    strncpy(parent->entries[dir_opening].filename, tokens[path_len-1] , FILENAME_LEN);
    dir_into_buffer(parent,buf);
    write_block(parent_dir_block,buf);
    int block;
    if (blocks_needed<=10) {
        // Case 1: the whole file fits in the ten direct blocks.
        for(int i = 0; i<blocks_needed; i++){
            block = find_block();
            close_block(block);
            file_inode->blocks[i] = block;
            fseek(file,i*BLOCK_SIZE,SEEK_SET);
            fread(buf,BLOCK_SIZE,1,file);
            if(i == blocks_needed-1 ){ for(int j = extra_bytes; j<BLOCK_SIZE; j++){ buf[j]=0; } }
            write_block(block,buf);
        }
    } else if(blocks_needed>10 && blocks_needed<=266){
        // Case 2: direct blocks plus one single-indirect block.
        block = find_block();
        close_block(block);
        file_inode->single_ind = block;
        int di_blocks_written;
        int s_blocks_written;
        for(di_blocks_written = 0; di_blocks_written<10; di_blocks_written++){
            block = find_block();
            close_block(block);
            file_inode->blocks[di_blocks_written] = block;
            fseek(file,di_blocks_written*BLOCK_SIZE,SEEK_SET);
            fread(buf,BLOCK_SIZE,1,file);
            write_block(block,buf);
        }
        blocks_needed -= di_blocks_written;
        read_block(file_inode->single_ind,single_buf);
        for(s_blocks_written = 0; s_blocks_written<blocks_needed; s_blocks_written++){
            block = find_block();
            close_block(block);
            single_buf[2*s_blocks_written] = (block & 0xFF00) >> 8;
            single_buf[(2*s_blocks_written)+1] = (block & 0x00FF);
            fseek(file,(di_blocks_written+s_blocks_written)*BLOCK_SIZE,SEEK_SET);
            fread(buf,BLOCK_SIZE,1,file);
            if(s_blocks_written == blocks_needed-1 && extra_bytes != 0){ for(int j = extra_bytes; j<BLOCK_SIZE; j++){ buf[j]=0; } }
            write_block(block,buf);
        }
        write_block(file_inode->single_ind,single_buf);
    } else if (blocks_needed > 266){ // if need more than 10 direct blocks and the 256 single-indirect
        // Case 3: direct + single-indirect + double-indirect blocks.
        block = find_block();
        close_block(block);
        file_inode->double_ind = block;
        block = find_block();
        close_block(block);
        file_inode->single_ind = block;
        int di_blocks_written;
        int s_blocks_written;
        for(di_blocks_written = 0; di_blocks_written<10; di_blocks_written++){
            block = find_block();
            close_block(block);
            file_inode->blocks[di_blocks_written] = block;
            fseek(file,di_blocks_written*BLOCK_SIZE,SEEK_SET);
            fread(buf,BLOCK_SIZE,1,file);
            write_block(block,buf);
        }
        blocks_needed -= di_blocks_written;
        read_block(file_inode->single_ind,single_buf);
        for(s_blocks_written = 0; s_blocks_written<256; s_blocks_written++){
            block = find_block();
            close_block(block);
            single_buf[2*s_blocks_written] = (block & 0xFF00) >> 8;
            single_buf[(2*s_blocks_written)+1] = (block & 0x00FF);
            fseek(file,(di_blocks_written+s_blocks_written)*BLOCK_SIZE,SEEK_SET);
            fread(buf,BLOCK_SIZE,1,file);
            if(s_blocks_written == blocks_needed-1 && extra_bytes != 0){ for(int j = extra_bytes; j<BLOCK_SIZE; j++){ buf[j]=0; } }
            write_block(block,buf);
        }
        blocks_needed -= s_blocks_written;
        write_block(file_inode->single_ind,single_buf);
        int si_blocks_needed = (blocks_needed/256) + (!((blocks_needed%256)==0)); // calculates how many single blocks need to be written in current iteration of double indirect block loop
        int blocks_to_write = 256*(blocks_needed>256)+((blocks_needed<=256)*blocks_needed%256);
        int single_block;
        for(int k = 0; k<si_blocks_needed;k++){
            single_block = find_block();
            close_block(single_block);
            double_buf[2*k] = (single_block & 0xFF00) >> 8;
            double_buf[(2*k)+1] = (single_block & 0x00FF);
            read_block(single_block,single_buf);
            // 256 data blocks per single-indirect block, or the remainder on the last one.
            blocks_to_write = 256*(blocks_needed>256)+((blocks_needed<=256)*blocks_needed%256);
            for(int i = 0; i<blocks_to_write; i++){
                block = find_block();
                close_block(block);
                single_buf[(2*i)] = (block & 0xFF00) >> 8;
                single_buf[(2*i)+1] = (block & 0x00FF);
                fseek(file,((s_blocks_written+di_blocks_written)+i)*BLOCK_SIZE,SEEK_SET);
                fread(buf,BLOCK_SIZE,1,file);
                if(i == blocks_to_write-1 && extra_bytes != 0){ for(int j = extra_bytes; j<BLOCK_SIZE; j++){ buf[j]=0; } }
                write_block(block,buf);//blocks_needed -= 256;
            }
            if(blocks_needed>256){
                blocks_needed -= 256;
                s_blocks_written += 256;
            } else {
                for(int j = 2*blocks_to_write;j<BLOCK_SIZE;j++){ single_buf[j]=0; }
            }
            write_block(single_block,single_buf);
        }
        for(int j = 2*si_blocks_needed;j<BLOCK_SIZE;j++){ double_buf[j]=0; }
        write_block(file_inode->double_ind,double_buf);
    }
    // Persist the inode last, once all block addresses are known.
    inode_into_buffer(file_inode,buf);
    write_block(file_inode_block,buf);
    free(file_inode);
    free(buf);
    free(double_buf);
    free(single_buf);
    free(parent);
    free(in);
}
/*
 * Delete the file at `path`: walk the directory path, blank the file's
 * entry in its parent directory, then zero and release every data block
 * (direct, single-indirect and double-indirect), the indirect blocks
 * themselves, the inode block, and the inode map entry.
 */
void remove_file(BYTE_t* path){
    inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    dir_t* parent = (dir_t*)calloc(1,sizeof(dir_t));
    // strtok mutates its input, so tokenize a private copy of the path.
    BYTE_t new_path [MAX_PATH_LEN];
    strncpy(new_path,path,MAX_PATH_LEN);
    BYTE_t* tok;
    const char* delim = "/";
    tok = strtok(new_path,delim);
    BYTE_t* tokens [5]; // change back to size 4
    int path_len = 0;
    while( tok != NULL ) {
        tokens[path_len] = tok;
        path_len++;
        tok = strtok(NULL, delim);
    }
    // Walk from the root to the parent directory of the file.
    int parent_dir_block = ROOT_DIR_BLOCK;
    int parent_inode_ID = ROOT_INODE_ID;
    int parent_inode_address = ROOT_INODE_BLOCK;
    read_block(parent_inode_address,buf);
    buffer_into_inode(in,buf);
    assert(in->flags==1);
    parent_dir_block = in->blocks[0];
    read_block(parent_dir_block,buf);
    buffer_into_dir(parent,buf);
    for(int i = 1;i<path_len-1;i++){ // reads through intermediate directories on path
        for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
            if(!(strncmp(tokens[i],parent->entries[j].filename,FILENAME_LEN))){
                parent_inode_ID=parent->entries[j].inode_ID;
                parent_inode_address = get_inode_address(parent_inode_ID);
                read_block(parent_inode_address,buf);
                buffer_into_inode(in,buf);
                assert(in->flags==1);
                parent_dir_block = in->blocks[0];
                read_block(parent_dir_block,buf);
                buffer_into_dir(parent,buf);
                break;
            }
        }
    }
    // Locate the file's entry in the parent directory.
    int file_loc;
    for(file_loc=2;file_loc<MAX_DIR_ENTRIES;file_loc++){
        if(!strcmp(parent->entries[file_loc].filename,tokens[path_len-1]) ){ break; };
    }
    assert(file_loc<MAX_DIR_ENTRIES);
    inode_t* file_inode = (inode_t*)calloc(1,sizeof(inode_t));
    BYTE_t* sing_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    BYTE_t* doub_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    int file_inode_ID = parent->entries[file_loc].inode_ID;
    int file_inode_address = get_inode_address(file_inode_ID);
    read_block(file_inode_address,buf);
    buffer_into_inode(file_inode,buf);
    // Unlink the entry from the parent and persist the parent.
    parent->entries[file_loc].inode_ID = 0;
    strncpy(parent->entries[file_loc].filename,"",FILENAME_LEN);
    dir_into_buffer(parent,buf);
    write_block(parent_dir_block,buf);
    assert(file_inode->flags == 0); // must be a regular file, not a directory
    int blocks_to_erase = (file_inode->size / BLOCK_SIZE) + (!((file_inode->size%BLOCK_SIZE)==0));
    BYTE_t* wipe = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
    if(blocks_to_erase<=10){
        // Case 1: only direct blocks to release.
        for(int i = 0; i<blocks_to_erase; i++){
            write_block(file_inode->blocks[i],wipe);
            open_block(file_inode->blocks[i]);
        }
    } else if (blocks_to_erase>10 && blocks_to_erase<=266){
        // Case 2: direct blocks plus the single-indirect chain.
        int block;
        int di_blocks_erased;
        int s_blocks_erased;
        for(di_blocks_erased = 0; di_blocks_erased<10; di_blocks_erased++){
            write_block(file_inode->blocks[di_blocks_erased],wipe);
            open_block(file_inode->blocks[di_blocks_erased]);
        }
        blocks_to_erase -= di_blocks_erased;
        read_block(file_inode->single_ind,sing_buf);
        for(s_blocks_erased = 0; s_blocks_erased<blocks_to_erase; s_blocks_erased++){
            block = ((sing_buf[2*s_blocks_erased] << 8) | (sing_buf[(2*s_blocks_erased)+1]) );
            write_block(block,wipe);
            open_block(block);
        }
        for(int i = s_blocks_erased;i<BLOCK_SIZE;i++){ sing_buf[i] = 0; }
        write_block(file_inode->single_ind,wipe);
        open_block(file_inode->single_ind);
    } else {
        // Case 3: direct + single-indirect + double-indirect chains.
        int s_blocks_erased;
        int block;
        int sing_block;
        for(int i = 0; i<10; i++){
            write_block(file_inode->blocks[i],wipe);
            open_block(file_inode->blocks[i]);
        }
        read_block(file_inode->single_ind,sing_buf);
        for(s_blocks_erased = 0; s_blocks_erased<256; s_blocks_erased++){
            block = ((sing_buf[2*s_blocks_erased] << 8) | (sing_buf[(2*s_blocks_erased)+1]) );
            write_block(block,wipe);
            open_block(block);
        }
        write_block(file_inode->single_ind,wipe);
        open_block(file_inode->single_ind);
        read_block(file_inode->double_ind,doub_buf);
        for(int k = 0; k<256; k++){
            sing_block = ((doub_buf[2*k] << 8) | (doub_buf[(2*k)+1]) );
            if(sing_block == 0){ break; } // address 0 terminates the list
            read_block(sing_block,sing_buf);
            for(s_blocks_erased = 0; s_blocks_erased<256; s_blocks_erased++){
                block = ((sing_buf[2*s_blocks_erased] << 8) | (sing_buf[(2*s_blocks_erased)+1]) );
                if(block==0){ break; }
                write_block(block,wipe);
                open_block(block);
            }
            write_block(sing_block,wipe);
            open_block(sing_block);
        }
        write_block(file_inode->double_ind,wipe);
        open_block(file_inode->double_ind);
    }
    // Finally release the inode block and the inode itself.
    write_block(file_inode_address,wipe);
    open_block(file_inode_address);
    open_inode(file_inode_ID);
    free(in);
    free(buf);
    free(parent);
    free(file_inode);
    free(sing_buf);
    free(doub_buf);
    free(wipe);
}
/* Return 1 if every byte of the BLOCK_SIZE-byte buffer is zero, else 0. */
int block_empty(BYTE_t* buf){
    int pos = 0;
    while(pos < BLOCK_SIZE){
        if(buf[pos] != 0){ return 0; }
        pos++;
    }
    return 1;
}
/* Return 1 if the first 32 bytes (the serialised inode fields) of the
 * buffer are all zero, else 0. */
int inode_empty(BYTE_t* buf){
    int pos = 0;
    while(pos < 32){
        if(buf[pos] != 0){ return 0; }
        pos++;
    }
    return 1;
}
void read_file(BYTE_t* path, FILE* file){
inode_t* in = (inode_t*)calloc(1,sizeof(inode_t));
BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
dir_t* parent = (dir_t*)calloc(1,sizeof(dir_t));
BYTE_t new_path [MAX_FILE_PATH_LEN];
strncpy(new_path,path,MAX_FILE_PATH_LEN);
BYTE_t* tok;
const char* delim = "/";
tok = strtok(new_path,delim);
BYTE_t* tokens [5];
int path_len = 0;
while( tok != NULL ) {
tokens[path_len] = tok;
path_len++;
tok = strtok(NULL, delim);
}
int parent_dir_block = ROOT_DIR_BLOCK;
int parent_inode_ID = ROOT_INODE_ID;
int parent_inode_address = ROOT_INODE_BLOCK;
read_block(parent_inode_address,buf);
buffer_into_inode(in,buf);
assert(in->flags==1);
parent_dir_block = in->blocks[0];
read_block(parent_dir_block,buf);
buffer_into_dir(parent,buf);
for(int i = 1;i<path_len-1;i++){ // reads thorugh intermediate directories on path
for(int j=2;j<MAX_DIR_ENTRIES;j++){ // first entry in directory is itself, parent is second so we can skip checking those(start at 2)
if(!(strncmp(tokens[i],parent->entries[j].filename,FILENAME_LEN))){
parent_inode_ID=parent->entries[j].inode_ID;
parent_inode_address = get_inode_address(parent_inode_ID);
read_block(parent_inode_address,buf);
buffer_into_inode(in,buf);
assert(in->flags==1);
parent_dir_block = in->blocks[0];
read_block(parent_dir_block,buf);
buffer_into_dir(parent,buf);
break;
}
}
}
int file_loc;
for(file_loc=2;file_loc<MAX_DIR_ENTRIES;file_loc++){
if(!strcmp(parent->entries[file_loc].filename,tokens[path_len-1]) ){ break; };
}
assert(file_loc<MAX_DIR_ENTRIES);
inode_t* file_inode = (inode_t*)calloc(1,sizeof(inode_t));
BYTE_t* sing_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
BYTE_t* doub_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
int file_inode_ID = parent->entries[file_loc].inode_ID;
int file_inode_address = get_inode_address(file_inode_ID);
read_block(file_inode_address,buf);
buffer_into_inode(file_inode,buf);
assert(file_inode->flags == 0);
int full_blocks = ((int)file_inode->size)/BLOCK_SIZE;
int extra_bytes = ((int)file_inode->size)%BLOCK_SIZE;
int blocks_to_read = full_blocks+(extra_bytes!=0);
if (blocks_to_read<=10) {
int i;
for(i = 0; i<full_blocks; i++){
read_block(file_inode->blocks[i],buf);
fwrite(buf,BLOCK_SIZE,1,file);
}
blocks_to_read-=full_blocks;
if(blocks_to_read!=0){
read_block(file_inode->blocks[i],buf);
fwrite(buf,extra_bytes,1,file);
}
} else if(blocks_to_read>10 && blocks_to_read<=266){
int block;
for(int i = 0; i<10; i++){
read_block(file_inode->blocks[i],buf);
fwrite(buf,BLOCK_SIZE,1,file);
}
full_blocks-=10;
blocks_to_read -= 10;
read_block(file_inode->single_ind,sing_buf);
int j;
for(j = 0; j<full_blocks; j++){
block = ((sing_buf[2*j] << 8) | (sing_buf[(2*j)+1]) );
read_block(block,buf);
fwrite(buf,BLOCK_SIZE,1,file);
}
blocks_to_read-=full_blocks;
if(blocks_to_read!=0){
block = ((sing_buf[2*j] << 8) | (sing_buf[(2*j)+1]) );
read_block(block,buf);
fwrite(buf,extra_bytes,1,file);
}
} else if (blocks_to_read > 266){ // if need more than 10 direct block and the 256 indirect
int block;
int sing_block;
for(int i = 0; i<10; i++){
read_block(file_inode->blocks[i],buf);
fwrite(buf,BLOCK_SIZE,1,file);
}
full_blocks-=10;
blocks_to_read -= 10;
read_block(file_inode->single_ind,sing_buf);
for(int j = 0; j<256; j++){
block = ((sing_buf[2*j] << 8) | (sing_buf[(2*j)+1]) );
read_block(block,buf);
fwrite(buf,BLOCK_SIZE,1,file);
}
blocks_to_read-=256;
full_blocks-=256;
read_block(file_inode->double_ind,doub_buf);
//int j;
int k;
int next;
int doubl = (blocks_to_read/256) + (!((blocks_to_read%256)==0)); // calculates how many single blocks need to be written in current iteration of double indirect block loop
int single_read;
for(k = 0; k<256; k++){
sing_block = ((doub_buf[2*k] << 8) | (doub_buf[(2*k)+1]) );
if(sing_block == 0){ break; }
read_block(sing_block,sing_buf);
single_read = 256*(blocks_to_read>256) + blocks_to_read*(blocks_to_read<=256);
for(int j = 0; j<single_read; j++){
block = ((sing_buf[2*j] << 8) | (sing_buf[(2*j)+1]) );
read_block(block,buf);
if(single_read!=256 && j==single_read-1){ fwrite(buf,extra_bytes,1,file); break; }
fwrite(buf,BLOCK_SIZE,1,file);
}
blocks_to_read-=single_read;
}
}
free(file_inode);
free(buf);
free(doub_buf);
free(sing_buf);
free(parent);
free(in);
}
void file_check(){
BYTE_t* check_buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
BYTE_t* buf = (BYTE_t*)calloc(BLOCK_SIZE,sizeof(BYTE_t));
read_block(1,check_buf); //read in free block vector
BYTE_t block_byte;
BYTE_t mask;
int bit_shift;
printf("Checking Blocks\n");
for(int block = 0; block<NUM_BLOCKS;block++){
block_byte = check_buf[block/8];
bit_shift = block%8;
mask = (0b10000000 >> (bit_shift));
read_block(block,buf);
if( (block_byte & mask)!=mask ){ // if block is marked as used
if(block_empty(buf)){ //if block is marked as used but it is empty, check will free it
if(block>=10){ //avoid the allocated block at the start that are empty
printf("Block %-4d Status: Empty but marked as used\n",block);
printf("Block %-4d Status: Opening...\n",block);
open_block(block);
printf("Block %-4d Status: Good\n",block);
} else {
printf("Block %-4d Status: Good\n",block);
}
} else {
printf("Block %-4d Status: Good\n",block);
}
} else { // block is marked as free
if(!block_empty(buf)){ //if block is marked as free but it is occupied, check will free it
printf("Block %-4d Status: Occupied but marked as free\n",block);
printf("Block %-4d Status: Closing...\n",block);
close_block(block);
printf("Block %-4d Status: Good\n",block);
} else {
printf("Block %-4d Status: Good\n",block);
}
}
}
read_block(2,check_buf);
int inode_address;
printf("Checking inodes\n");
for(int i = 0; i<256; i++){
inode_address = (check_buf[2*i] << 8) | (check_buf[(2*i)+1]) ;
if(!inode_address){
read_block(inode_address,buf);
if(!inode_empty(buf)){
printf("inode %-3d Status: Good\n",i);
} else {
printf("inode %-3d Status: Block allocated but inode is empty\n",i);
printf("inode %-3d Status: Opening inode ...\n",i);
open_inode(i);
printf("inode %-3d Status: Good\n",i);
}
}
}
free(check_buf);
free(buf);
}<file_sep>/A3/apps/test04.c
#include "../io/File.h"
int main(){
FILE* small_file = fopen("small_read.txt","wb+");
FILE* med_file = fopen("med_read.txt","wb+");
FILE* large_file = fopen("large_read.txt","wb+");
read_file("~/SmallFile",small_file);
read_file("~/csc360/MedFile",med_file);
read_file("~/csc360/assignments/LargeFile",large_file);
fclose(small_file);
fclose(med_file);
fclose(large_file);
return 0;
}<file_sep>/A3/apps/app.c
#include "../io/File.h"
int main(){
InitLLFS();
FILE* test = fopen("testfile.txt","rb+");
write_file("~/test.txt",test);
fclose(test);
FILE* frt = fopen("out.txt","wb+");
read_file("~/test.txt",frt);
fclose(frt);
return 0;
}<file_sep>/Kapish/kapish.c
#define _GNU_SOURCE
#include <stdio.h>
#include <signal.h>
#include <string.h>
#include <unistd.h>
#include <sys/wait.h>
#include <sys/types.h>
#include <stdlib.h>
void handle_sigint(int sig){ //control c signal handler
signal(SIGINT,handle_sigint);
}
#define INPUT_BUFFER 512
#define TOK_BUFFER 69
#define TOK_DELIM " \t\r\n\a" // delimeters are all whitespaces
char *built_in[] = {"cd","setenv","unsetenv","exit"}; //the built in commands
//branch for execvp()
int shell_launcher(char **args){
pid_t p_id;
p_id = fork();
if(p_id == 0){
execvp(args[0],args);
fprintf(stderr,"kapish: Invalid command ");
exit(EXIT_FAILURE);
} else if (p_id < 0) {
fprintf(stderr,"kapish: fork error ");
} else {
wait(NULL);
}
return 1;
}
//call chdir
int shell_cd(char **args){
if(args[1]!=NULL){
if(chdir(args[1])){
fprintf(stderr,"kapish: invalid path for cd ");
}
} else {chdir(getenv("HOME"));}
return 1;
}
//call setenv
int shell_setenv(char **args){
if(args[1]!=NULL){
if(args[2]==NULL){
setenv(args[1],"", 1);
return 1;
}
if( setenv(args[1], args[2], 1) ){
fprintf(stderr,"kapish: invalid argument for setenv");
}
} else{fprintf(stderr,"kapish: argument needed for setenv");}
return 1;
}
int shell_unsetenv(char **args){
if(args[1]!=NULL){
if(unsetenv(args[1]) != 0){
fprintf(stderr,"kapish: invalid argument for unsetenv");
}
} else {fprintf(stderr,"kapish: argument needed for unsetenv");}
return 1;
}
//exits kapish
int shell_exit(void){
return 0;
}
//reads in line
char *read_line(void){
int buffer_size = INPUT_BUFFER;
int pos = 0;
char *buf = malloc(sizeof(char) * buffer_size);
int c;
if(!buf){
fprintf(stderr, "kapish: allocation error"); //if malloc failed
exit(EXIT_FAILURE);
}
while(1){
c=getchar();
if(c == EOF && pos == 0){
free(buf);
printf("\n");
exit(0);
}
if(c == EOF || c == '\n' ){
buf[pos] = '\0';
return buf;
} else {
buf[pos]=c;
}
pos++;
if(pos>=buffer_size){
buffer_size += INPUT_BUFFER;
buf = realloc(buf,buffer_size*sizeof(char));
if(!buf){
fprintf(stderr, "kapish: allocation error");
exit(EXIT_FAILURE);
}
}
}
}
//tokenizes input
char **tokenize(char *input){
int buffer_size = TOK_BUFFER;
int pos = 0;
char **toks = malloc(sizeof(char) * buffer_size);
char *tok;
if(!toks){
fprintf(stderr, "kapish: allocation error");
exit(EXIT_FAILURE);
}
tok = strtok(input, TOK_DELIM);
while(tok !=NULL){
toks[pos] = tok;
pos++;
if(pos>=buffer_size){
buffer_size += TOK_BUFFER;
toks = realloc(toks,buffer_size * sizeof(char));
if(!toks){
fprintf(stderr, "kapish: allocation error");
exit(EXIT_FAILURE);
}
}
tok = strtok(NULL,TOK_DELIM);
}
toks[pos] = NULL;
return toks;
}
// will call a built in command if the input matches one of them, if not it will launch the execvp loop
int execute (char **args){
if(args[0]==NULL || args[0][0]=='#'){
return 1;
}
int index =0;
int size = sizeof(built_in) / sizeof(char*);
for(index=0; index<size;index++){
if(strcmp(built_in[index],args[0])==0){break;}
}
switch(index){
case 0:{
return shell_cd(args);
}
case 1:{
return shell_setenv(args);
}
case 2:{
return shell_unsetenv(args);
}
case 3:{
return shell_exit();
}
default:{
return shell_launcher(args); //handles non built in commands
}
}
return 0;
}
//will continuosly take input until it is told not to
void shell_loop(void){
char *line;
char **args;
int status;
do{
printf("\n? ");
line = read_line();
args = tokenize(line);
status = execute(args);
free(line);
free(args);
}while(status);
}
//reads in the .kapishrc from home directory
int rc_file_read(){
char *path = malloc(sizeof(char) * INPUT_BUFFER);
strcpy(path,getenv("HOME"));
strcat(path,"/.kapishrc");
FILE *rc = fopen(path,"r");
if(rc == NULL){
fprintf(stderr,"Error with .kapishrc and %s",path);
free(path);
return 0;
}
char **tokens;
char* line = malloc(sizeof(char) * INPUT_BUFFER);
while(fgets(line,INPUT_BUFFER,rc)){
tokens = tokenize(line);
execute(tokens);
free(tokens);
}
free(path);
free(line);
fclose(rc);
return 1;
}
int main(){
signal(SIGINT,handle_sigint); //handles control-c
rc_file_read(); //read .rc file and run commands
shell_loop(); //take input line commands
return EXIT_SUCCESS;
}
<file_sep>/A2/Nathan_Marcotte_CSC360_A2/README.md
<NAME>
V00876934
CSC 360 Assignment 2
Spring 2019
Threads
<file_sep>/A2/pc_mutex_cond_pthread.c
/* <NAME>
* CSC 360 Spring 2019
* V00876934
* pc_mutex_cond_pthread.c
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <pthread.h>
#define MAX_ITEMS 10
const int NUM_ITERATIONS = 200;
const int NUM_CONSUMERS = 2;
const int NUM_PRODUCERS = 2;
int producer_wait_count; // # of times producer had to wait
int consumer_wait_count; // # of times consumer had to wait
int histogram [MAX_ITEMS+1]; // histogram [i] == # of times list stored i items
pthread_mutex_t mutex;
pthread_cond_t max;
pthread_cond_t none;
int items = 0;
void* producer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
pthread_mutex_lock(&mutex);
while(items>=MAX_ITEMS){
producer_wait_count++;
pthread_cond_wait(&none,&mutex);
}
items++;
histogram[items]++;
assert(items<=MAX_ITEMS);
pthread_cond_signal(&max);
pthread_mutex_unlock(&mutex);
}
return NULL;
}
void* consumer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
pthread_mutex_lock(&mutex);
while(items<=0){
consumer_wait_count++;
pthread_cond_wait(&max,&mutex);
}
items--;
histogram[items]++;
assert(items>=0);
pthread_cond_signal(&none);
pthread_mutex_unlock(&mutex);
}
return NULL;
}
int main (int argc, char** argv) {
pthread_t t[4];
pthread_mutex_init(&mutex, NULL);
pthread_cond_init(&max,NULL);
pthread_cond_init(&none,NULL);
// TODO: Create Threads and Join
for(int i = 0;i<NUM_PRODUCERS;i++){
pthread_create(&t[i],NULL,&producer,NULL);
}
for(int i = NUM_PRODUCERS;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
pthread_create(&t[i],NULL,&consumer,NULL);
}
for(int i = 0;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
pthread_join(t[i],NULL);
}
pthread_mutex_destroy(&mutex);
pthread_cond_destroy(&max);
pthread_cond_destroy(&none);
printf ("producer_wait_count=%d\nconsumer_wait_count=%d\n", producer_wait_count, consumer_wait_count);
printf ("items value histogram:\n");
int sum=0;
for (int i = 0; i <= MAX_ITEMS; i++) {
printf (" items=%d, %d times\n", i, histogram [i]);
sum += histogram [i];
}
assert (sum == sizeof (t) / sizeof (pthread_t) * NUM_ITERATIONS);
}
<file_sep>/A3/apps/Makefile
VDISK = ../disk
IO = ../io
TARGETS = app test01 test02 test03 test04 test05 test06
OBJS = $(VDISK)/vdiskAPI.o $(IO)/File.o
JUNKF = $(OBJS) *~
JUNKD = *.dSYM
CFLAGS += -g -std=gnu11 -I$(VDISK)
all: $(TARGETS)
$(TARGETS): $(OBJS)
tidy:
rm -f $(JUNKF); rm -rf $(JUNKD)
clean:
rm -f $(JUNKF) $(TARGETS); rm -rf $(JUNKD)<file_sep>/A3/io/File.h
#ifndef FILE_H
#define FILE_H
#include "vdiskAPI.h"
#include <stdint.h>
#include <inttypes.h>
#include <string.h>
#define MAX_INODES 256
#define MAX_PATH_LEN 128
#define MAX_FILE_PATH_LEN 160
#define ROOT_INODE_BLOCK 10
#define MAX_DIR_ENTRIES 16
#define FILENAME_LEN 31
#define ROOT_DIR_BLOCK 11
#define ROOT_INODE_ID 1
typedef struct
{
uint32_t size;
uint32_t flags;
uint16_t blocks [10];
uint16_t single_ind;
uint16_t double_ind;
}inode_t;
typedef struct
{
uint8_t inode_ID;
BYTE_t filename [FILENAME_LEN];
}dir_entry_t;
typedef struct
{
dir_entry_t entries [MAX_DIR_ENTRIES];
}dir_t;
void InitLLFS(void);
void close_block(int block);
void open_block(int block);
int find_inode(void);
void close_inode(int inode_num, int block_address);
void open_inode(int inode_num);
int get_inode_address(int inode_num);
int find_block(void);
void print_inode(inode_t* inode);
void make_dir (BYTE_t* path);
void remove_dir (BYTE_t* path);
void make_root_dir (void);
void print_buf(BYTE_t* buffer);
void inode_into_buffer(inode_t* inode, BYTE_t* buffer);
void buffer_into_inode(inode_t* inode, BYTE_t* buffer);
void dir_into_buffer(dir_t* dir, BYTE_t* buffer);
void buffer_into_dir(dir_t* dir, BYTE_t* buffer);
void write_file(BYTE_t* path, FILE* file);
void read_file(BYTE_t* path, FILE* file);
void remove_file(BYTE_t* path);
int block_empty(BYTE_t* buf);
int inode_empty(BYTE_t* buf);
void file_check(void);
#endif<file_sep>/A2/Nathan_Marcotte_CSC360_A2/pc_spinlock_uthread.c
/* <NAME>
* CSC 360 Spring 2019
* V00876934
* pc_spinlock_uthread.c
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include "uthread.h"
#include "uthread_mutex_cond.h"
#include "spinlock.h"
#define MAX_ITEMS 10
const int NUM_ITERATIONS = 200;
const int NUM_CONSUMERS = 2;
const int NUM_PRODUCERS = 2;
int producer_wait_count; // # of times producer had to wait
int consumer_wait_count; // # of times consumer had to wait
int histogram [MAX_ITEMS+1]; // histogram [i] == # of times list stored i items
spinlock_t lock;
spinlock_t prod_lock;
spinlock_t cons_lock;
int items = 0;
void* producer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
while(1){
while(items>=MAX_ITEMS){
spinlock_lock(&prod_lock);
producer_wait_count++;
spinlock_unlock(&prod_lock);
}
spinlock_lock(&lock);
if(items >= MAX_ITEMS){
spinlock_unlock(&lock);
} else {
break;
}
}
items++;
histogram[items]++;
assert(items<=MAX_ITEMS);
spinlock_unlock(&lock);
}
return NULL;
}
void* consumer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
while(1){
while(items<=0){
spinlock_lock(&cons_lock);
consumer_wait_count++;
spinlock_unlock(&cons_lock);
}
spinlock_lock(&lock);
if(items <= 0){
spinlock_unlock(&lock);
} else {
break;
}
}
items--;
histogram[items]++;
assert(items>=0);
spinlock_unlock(&lock);
}
return NULL;
}
int main (int argc, char** argv) {
uthread_t t[4];
uthread_init (4);
spinlock_create(&lock);
spinlock_create(&prod_lock);
spinlock_create(&cons_lock);
// TODO: Create Threads and Join
for(int i = 0;i<NUM_PRODUCERS;i++){
t[i]=uthread_create(producer,NULL);
}
for(int i = NUM_PRODUCERS;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
t[i]=uthread_create(consumer,NULL);
}
for(int i = 0;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
uthread_join(t[i],0);
}
printf ("producer_wait_count=%d\nconsumer_wait_count=%d\n", producer_wait_count, consumer_wait_count);
printf ("items value histogram:\n");
int sum=0;
for (int i = 0; i <= MAX_ITEMS; i++) {
printf (" items=%d, %d times\n", i, histogram [i]);
sum += histogram [i];
}
assert (sum == sizeof (t) / sizeof (uthread_t) * NUM_ITERATIONS);
}
<file_sep>/A2/Nathan_Marcotte_CSC360_A2/pc_sem_pthread.c
/* <NAME>
* CSC 360 Spring 2019
* V00876934
* pc_sem_pthread.c
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <pthread.h>
#include <semaphore.h>
#define MAX_ITEMS 10
const int NUM_ITERATIONS = 200;
const int NUM_CONSUMERS = 2;
const int NUM_PRODUCERS = 2;
int histogram [MAX_ITEMS+1]; // histogram [i] == # of times list stored i items
int items = 0;
sem_t lock;
sem_t can_prod;
sem_t can_cons;
void* producer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
// TODO
sem_wait(&can_prod);
sem_wait(&lock);
items++;
histogram[items]++;
assert(items<=MAX_ITEMS);
sem_post(&lock);
sem_post(&can_cons);
}
return NULL;
}
void* consumer (void* v) {
for (int i=0; i<NUM_ITERATIONS; i++) {
// TODO
sem_wait(&can_cons);
sem_wait(&lock);
items--;
histogram[items]++;
assert(items>=0);
sem_post(&lock);
sem_post(&can_prod);
}
return NULL;
}
int main (int argc, char** argv) {
pthread_t t[4];
sem_init(&lock,0,1);
sem_init(&can_prod,0,MAX_ITEMS);
sem_init(&can_cons,0,0);
for(int i = 0;i<NUM_PRODUCERS;i++){
pthread_create(&t[i],NULL,producer,NULL);
}
for(int i = NUM_PRODUCERS;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
pthread_create(&t[i],NULL,consumer,NULL);
}
for(int i = 0;i<NUM_PRODUCERS+NUM_CONSUMERS;i++){
pthread_join(t[i],0);
}
// TODO: Create Threads and Join
printf ("items value histogram:\n");
int sum=0;
for (int i = 0; i <= MAX_ITEMS; i++) {
printf (" items=%d, %d times\n", i, histogram [i]);
sum += histogram [i];
}
assert (sum == sizeof (t) / sizeof (pthread_t) * NUM_ITERATIONS);
}
<file_sep>/Kapish/README.md
Code inspired by the tutorial : https://brennan.io/2015/01/16/write-a-shell-in-c/
<NAME>
V00876934
CSC 360 : Spring 2019
Kapish<file_sep>/Kapish/Makefile
CC=gcc
kapish : kapish.o
$(CC) kapish.o -o kapish
kapish.o : kapish.c
$(CC) -c -std=c11 -Wall -Werror kapish.c -o kapish.o
clean:
-rm kapish kapish.o
<file_sep>/A3/apps/test01.c
#include "../io/File.h"
int main(){
InitLLFS();
return 0;
}<file_sep>/A3/apps/tester.sh
#!/bin/bash
make all
echo "Test 01: Disk Initialization > hexdump stored in init_disk.txt"
./test01
hexdump -C ../disk/vdisk > init_disk.txt
echo ""
echo "Test 02: Directory Creation > hexdump stored in directories_disk.txt"
./test02
hexdump -C ../disk/vdisk > directories_disk.txt
echo ""
echo "Test 03: File Writing > hexdump stored in files_disk.txt"
./test03
hexdump -C ../disk/vdisk > files_disk.txt
echo ""
echo "Test 04: File Reading"
./test04
echo " Testing difference between original files and files read from vdisk:"
echo " Small sized file diff: "
diff small.txt small_read.txt
echo " Medium sized file diff: "
diff med.txt med_read.txt
echo " Large sized file diff: "
diff large.txt large_read.txt
echo ""
echo "Test 05: Directory and File Removal > hexdump stored in removal_disk.txt"
./test05
hexdump -C ../disk/vdisk > removed_disk.txt
echo " vdisk will now be restored to the initialized state"
echo " diff for initialized state and state after removals: "
diff init_disk.txt removed_disk.txt
echo ""
echo "Test 06: File Check > file check log stored in check_log.txt"
./test06 > check_log.txt
echo ""
echo "Testing Complete."<file_sep>/A2/Nathan_Marcotte_CSC360_A2/smoke_pthread.c
/* <NAME>
* CSC 360 Spring 2019
* V00876934
* smoke_pthread.c
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <fcntl.h>
#include <unistd.h>
#include <pthread.h>
//#include "uthread_mutex_cond.h"
#define NUM_ITERATIONS 1000
#ifdef VERBOSE
#define VERBOSE_PRINT(S, ...) printf (S, ##__VA_ARGS__);
#else
#define VERBOSE_PRINT(S, ...) ;
#endif
pthread_cond_t bac_mat;
pthread_cond_t pap_mat;
pthread_cond_t bac_pap;
int sum = 0;
struct Agent {
pthread_mutex_t mutex;
pthread_cond_t match;
pthread_cond_t paper;
pthread_cond_t tobacco;
pthread_cond_t smoke;
};
struct Agent* createAgent() {
struct Agent* agent = malloc (sizeof (struct Agent));
pthread_mutex_init(&agent->mutex,NULL);
pthread_cond_init (&agent->paper,NULL);
pthread_cond_init (&agent->match,NULL);
pthread_cond_init (&agent->tobacco,NULL);
pthread_cond_init (&agent->smoke,NULL);
return agent;
}
//
// TODO
// You will probably need to add some procedures and struct etc.
//
/**
* You might find these declarations helpful.
* Note that Resource enum had values 1, 2 and 4 so you can combine resources;
* e.g., having a MATCH and PAPER is the value MATCH | PAPER == 1 | 2 == 3
*/
enum Resource { MATCH = 1, PAPER = 2, TOBACCO = 4};
char* resource_name [] = {"", "match", "paper", "", "tobacco"};
int signal_count [5]; // # of times resource signalled
int smoke_count [5]; // # of times smoker with resource smoked
/**
* This is the agent procedure. It is complete and you shouldn't change it in
* any material way. You can re-write it if you like, but be sure that all it does
* is choose 2 random reasources, signal their condition variables, and then wait
* wait for a smoker to smoke.
*/
void* agent (void* av) {
struct Agent* a = av;
static const int choices[] = {MATCH|PAPER, MATCH|TOBACCO, PAPER|TOBACCO};
static const int matching_smoker[] = {TOBACCO, PAPER, MATCH};
pthread_mutex_lock (&a->mutex);
for (int i = 0; i < NUM_ITERATIONS; i++) {
int r = random() % 3;
signal_count [matching_smoker [r]] ++;
int c = choices [r];
if (c & MATCH) {
VERBOSE_PRINT ("match available\n");
pthread_cond_signal (&a->match);
}
if (c & PAPER) {
VERBOSE_PRINT ("paper available\n");
pthread_cond_signal (&a->paper);
}
if (c & TOBACCO) {
VERBOSE_PRINT ("tobacco available\n");
pthread_cond_signal (&a->tobacco);
}
VERBOSE_PRINT ("agent is waiting for smoker to smoke\n");
pthread_cond_wait (&a->smoke,&a->mutex);
}
pthread_mutex_unlock (&a->mutex);
return NULL;
}
void* get_smoker(int value){
switch(value){
case TOBACCO + PAPER:
VERBOSE_PRINT("Get Match");
pthread_cond_signal(&bac_pap);
sum = 0;
break;
case TOBACCO + MATCH:
VERBOSE_PRINT("Get Paper");
pthread_cond_signal(&bac_mat);
sum = 0;
break;
case MATCH + PAPER:
VERBOSE_PRINT("Get Tobacco");
pthread_cond_signal(&pap_mat);
sum = 0;
break;
default :
break;
}
}
void* handle_bac(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&ag->tobacco,&ag->mutex);
sum+=TOBACCO;
get_smoker(sum);
}
pthread_mutex_unlock(&ag->mutex);
}
void* handle_pap(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&ag->paper,&ag->mutex);
sum+=PAPER;
get_smoker(sum);
}
pthread_mutex_unlock(&ag->mutex);
}
void* handle_mat(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&ag->match,&ag->mutex);
sum+=MATCH;
get_smoker(sum);
}
pthread_mutex_unlock(&ag->mutex);
}
void* smoke_bac(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&pap_mat,&ag->mutex);
VERBOSE_PRINT("Tobacco Smoking");
pthread_cond_signal(&ag->smoke);
smoke_count[TOBACCO]++;
}
pthread_mutex_unlock(&ag->mutex);
}
void* smoke_pap(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&bac_mat,&ag->mutex);
VERBOSE_PRINT("Paper Smoking");
pthread_cond_signal(&ag->smoke);
smoke_count[PAPER]++;
}
pthread_mutex_unlock(&ag->mutex);
}
void* smoke_mat(void* agent){
struct Agent* ag = agent;
pthread_mutex_lock(&ag->mutex);
while(1){
pthread_cond_wait(&bac_pap,&ag->mutex);
VERBOSE_PRINT("Match Smoking");
pthread_cond_signal(&ag->smoke);
smoke_count[MATCH]++;
}
pthread_mutex_unlock(&ag->mutex);
}
int main (int argc, char** argv) {
struct Agent* a = createAgent();
pthread_cond_init(&bac_mat,NULL);
pthread_cond_init(&pap_mat,NULL);
pthread_cond_init(&bac_pap,NULL);
// TODO
pthread_t ag,hb,hp,hm,sb,sp,sm;
pthread_create(&hb,NULL,handle_bac,a);
pthread_create(&hp,NULL,handle_pap,a);
pthread_create(&hm,NULL,handle_mat,a);
pthread_create(&sb,NULL,smoke_bac,a);
pthread_create(&sp,NULL,smoke_pap,a);
pthread_create(&sm,NULL,smoke_mat,a);
pthread_create(&ag,NULL,agent,a);
pthread_join (ag, 0);
assert (signal_count [MATCH] == smoke_count [MATCH]);
assert (signal_count [PAPER] == smoke_count [PAPER]);
assert (signal_count [TOBACCO] == smoke_count [TOBACCO]);
assert (smoke_count [MATCH] + smoke_count [PAPER] + smoke_count [TOBACCO] == NUM_ITERATIONS);
printf ("Smoke counts: %d matches, %d paper, %d tobacco\n",
smoke_count [MATCH], smoke_count [PAPER], smoke_count [TOBACCO]);
} | 9655ca9a66e5a971819695990ef0a555aed3c101 | [
"Markdown",
"C",
"Makefile",
"Shell"
] | 22 | C | BigUnit/CSC360 | 0df0df4c964333fba8290f807bae23daaa4c59fe | 0c9f6b4909c2b1bedbe84eb6f48907017baa6bac |
refs/heads/master | <file_sep>alert('Ciao sono javascript e sono al tuo servizio');
console.log=('alert ciao sono javascript');
// scritta iniziale su PS5
document.getElementById('new-better').innerHTML='New is always Better';
console.log=('secondo elemento aggiuntivo ps5');
// scritta su PS5 descrizione prodotto
document.getElementById('style').innerHTML='Play with style';
// scritta su PS4
document.getElementById('subtitle-ps4').innerHTML='Old but Gold';
console.log=("elemento aggiuntivo ps5"); | f0e0d038a3a319dbaaf26e82ee3e38efcca6db49 | [
"JavaScript"
] | 1 | JavaScript | parix1999/htmlcss-playstation | 0f484df7229128a894d03f582d49cee858c8ea50 | 5ef3ead6217131ddef1f1e845b518a8d77fc90a2 |
refs/heads/master | <file_sep>#include <iostream>
#include <string>
#include <vector>
// letters sorted by frequency, for reference (just because)
// const std::vector<char>
// letters_by_freq{E,T,A,O,I,N,S,H,R,D,L,C,U,M,W,F,G,Y,P,B,V,K,J,X,Q,Z}
// Modified odd-even algorithm for sorting the answer
// I chose odd-even sort, because I'd written it before
// Sort the frequency vector into descending order, applying the same
// permutation to the parallel letter string so l_freq[i] keeps describing
// str[i].  Precondition: vec and str have the same length.
//
// Replaces the hand-rolled odd-even transposition sort with a stable
// standard-library sort over an index permutation.  Because the old sort
// only swapped on a strict '<' comparison it was stable too, so both
// versions keep equal-frequency letters in their original relative order.
void
sort_answer(std::vector<int>& vec, std::string& str)
{
    const std::size_t n = vec.size();
    std::vector<std::size_t> order(n);
    std::iota(order.begin(), order.end(), 0);
    std::stable_sort(order.begin(), order.end(),
                     [&vec](std::size_t a, std::size_t b) {
                         return vec[a] > vec[b];
                     });
    // Apply the permutation to both containers in one pass.
    std::vector<int> vec_sorted(n);
    std::string str_sorted(n, '\0');
    for (std::size_t i = 0; i < n; ++i) {
        vec_sorted[i] = vec[order[i]];
        str_sorted[i] = str[order[i]];
    }
    vec = std::move(vec_sorted);
    str = std::move(str_sorted);
}
// Print the letter/frequency table on a single line, one
// "count - letter | " entry per distinct letter, followed by a newline.
void
print_lv(std::string& letters, std::vector<int>& l_freq)
{
    const std::size_t count = letters.size();
    for (std::size_t idx = 0; idx < count; ++idx) {
        const char ch = letters[idx];
        // Look the character up again (first occurrence) so the printed
        // count matches the entry the rest of the program would find.
        std::cout << l_freq[letters.find(ch)] << " - " << ch << " | ";
    }
    std::cout << std::endl;
}
// Replace every occurrence of `target` in `text` with `replacement`, and
// record the substitution in the parallel letter table `letters` so the
// frequency display stays in sync.  Does nothing if `target` is not in the
// table.
//
// Fixes three defects in the previous version:
//  - `letters` was taken by value, so the caller's table was never updated;
//  - `find()`'s size_t result was truncated into a `short unsigned int`,
//    so the comparison against std::string::npos could never succeed and a
//    missing target caused an out-of-range write;
//  - leftover "Checking npos" debug output printed on every replaced char.
void
replace_letter(std::string& text,
               std::string& letters,
               char target,
               char replacement)
{
    std::string::size_type position = letters.find(target);
    // Guard BEFORE indexing: a target absent from the table is a no-op.
    if (position == std::string::npos) {
        return;
    }
    letters[position] = replacement;
    for (char& i : text) {
        if (i == target) {
            i = replacement;
        }
    }
}
// Interactive cryptogram helper.  Prints the hard-coded ciphertext together
// with a letter-frequency table, then loops on single-character commands
// read from stdin:
//   's' - substitute one ciphertext letter with another character
//   'r' - reset text and letter table to the original state
//   'e' - exit
int
main()
{
    // there is a hardcoded text,
    // because I didn't want to mess with copying the text multiple times
    // if something goes wrong (I've done this just for time saving and laziness)
    std::string text =
      "PIBIE KO IE IXNLKBMUIZW, WX OVXKEZ WT KOUIEQO, WE VLM MIOVMXE MQZM WT "
      "IOKI. VLMXM IXM TWCX RIKE KOUIEQO – LWHHIKQW, LWEOLC, OLKHWHC IEQ HYCOLC. "
      "VLMXM IXM IUOW EMIXUY 4,000 ORIUUMX KOUIEQO, VWW! PIBIE”O EMIXMOV "
      "RIKEUIEQ EMKZLJWXO IXM VLM OKJMXKIE XMZKWE WT XCOOKI KE VLM EWXVL, IEQ "
      "HWXMI IEQ NLKEI TIXVLMX OWCVL. IURWOV TWCX-TKTVLO WT PIBIE KO NWAMXMQ "
      "DKVL RWCEVIKEO. VLM PIBIEMOM IUBO XCE QWDE VLM NMEVXM WT VLM UIXZMOV "
      "KOUIEQ, LWEOLC. VLM LKZLMOV BMIH IEQ PIBIE’O RWOV TIRWCO RWCEVIKE KO "
      "RWCEV TCPK, I NWEM-OLIBMQ AWUNIEW NWEOKQMXMQ OINXMQ JY RIEY PIBIEMOM. "
      "PIBIE NIE JM I QIEZMXWCO BUINM. VLXMM WT VLM VMNVWEKN BUIVMO VLIV TWXR "
      "VLM MIXVL”O NXCOV RMMV EMIXJY IEQ WTVME RWAM IZIKEOV MINL WVLMX, NICOKEZ "
      "MIXVLFCIHMO. RWXM VLIE I VLWCOIEQ MIXVLFCIHMO LKV PIBIE MAMXY YMIX. PIBIE "
      "IUOW LIO IJWCV 200 AWUNIEWMO, 60 WT DLKNL IXM OVKUU INVKAM. VLM PIBIEMOM "
      "BMWBUM LIAM I QMMB ITTMNVKWE TWX VLM JMICVY WT VLM UIEQONIBM. VLM IENKMEV "
      "OLKEVW XMUKZKWE OIYO EIVCXIU TMIVCXMO UKHM RWCEVIKEO, DIVMXTIUUO IEQ "
      "TWXMOVO LIAM VLMKX WDE OBKXKV WX OWCU. PIBIE KO LWRM VW OWRM DWEQMXTCU "
      "DKUQUKTM! OWRM WT VLM NWCEVXY’O RWOV KENXMQKJUM NXMIVCXMO KENUCQM VLM "
      "OKHI QMMX, XMQ-NXWDEMQ NXIEM, OVMUUIX’O OMI MIZUM IEQ WEM WT VLM NWWUMOV "
      "NXKVVMXO WE VLM BUIEMV...VLM PIBIEMOM RINIFCM RWEHMY! RWOV WT PIBIE KO "
      "NWAMXMQ JY NWCEVXYOKQM – JCV DKVL RWXM VLIE 100 RKUUKWE BMWBUM UKAKEZ KE "
      "OCNL I ORIUU BUINM, OIQUY, DKUQUKTM LIO OCTTMXMQ. IUVLWCZL BWUUCVKWE KO "
      "EWD VKZLVUY NWEVXWUUMQ, WAMX VLM YMIXO, XWIQ JCKUQKEZ IEQ WVLMX LCRIE "
      "INVKAKVKMO LIAM LIXRMQ EIVCXIU LIJKVIVO. VWQIY, IJWCV 136 OBMNKMO KE "
      "PIBIE IXM UKOVMQ IO MEQIEZMXMQ. WTT VLM NWIOV WT VLKO KENXMQKJUM NWCEVXY, "
      "VLM DIXR VOCOLKRI NCXXMEV TUWDO TXWR VLM OWCVL KEVW VLM OMI WT PIBIE, "
      "DLMXM KV RMMVO I NWUQMX NCXXMEV TXWR VLM EWXVL. IO I XMOCUV WT VLM RKGKEZ "
      "WT QKTTMXMEV DIVMXO, VLM OMIO IXWCEQ PIBIE IXM AMXY XKNL KE TKOL IEQ "
      "WVLMX OMI UKTM. NWWU!";
    std::string letters = "";
    std::vector<int> l_freq{};
    // Tally each distinct letter: `letters` holds the distinct characters,
    // `l_freq` the parallel occurrence counts.
    for (char i : text) { // counting letters
        // Cast to unsigned char before std::isalpha: the ciphertext contains
        // non-ASCII punctuation (en dashes, curly quotes) whose UTF-8 bytes
        // are negative as plain char, and passing a negative value other than
        // EOF to isalpha is undefined behaviour.
        if (std::isalpha(static_cast<unsigned char>(i))) {
            if (letters.find(i) == std::string::npos) {
                letters.push_back(i);
                l_freq.push_back(1);
            } else {
                l_freq[letters.find(i)] += 1;
            }
        }
    }
    sort_answer(l_freq, letters); // sort letters by descending frequency
    char action = '0';
    // Pristine copies so the 'r' command can undo all substitutions.
    std::string text_backup = text;
    std::string letters_backup = letters;
    char target = ' ';
    char replacement = ' ';
    while (action != 'e') {
        std::cout << text << std::endl;
        print_lv(letters, l_freq);
        std::cin >> action;
        switch (action) {
            case 'r': { // reset to the original ciphertext
                text = text_backup;
                letters = letters_backup;
                break;
            };
            case 's': { // substitute one letter throughout the text
                std::cout << "Enter target and its replacement"
                          << "(separating them with a whitespace)" << std::endl;
                std::cin >> target >> replacement;
                replace_letter(text, letters, target, replacement);
                break;
            };
        }
    }
    return 0;
}
<file_sep># ControlWorks
The result:\
japan is an archipelago, or string of islands, on the eastern edge of asia. there are four main islands – hokkaido, honshu, shikoku and kyushu. there are also nearly 4,000 smaller islands, too! japan”s nearest mainland neighbors are the siberian region of russia in the north, and korea and china farther south. almost four-fifths of japan is covered with mountains. the japanese alps run down the centre of the largest island, honshu. the highest peak and japan’s most famous mountain is mount fuji, a cone-shaped volcano considered sacred by many japanese. japan can be a dangerous place. three of the tectonic plates that form the earth”s crust meet nearby and often move against each other, causing earthquakes. more than a thousand earthquakes hit japan every year. japan also has about 200 volcanoes, 60 of which are still active. the japanese people have a deep affection for the beauty of the landscape. the ancient shinto religion says natural features like mountains, waterfalls and forests have their own spirit or soul. japan is home to some wonderful wildlife! some of the country’s most incredible creatures include the sika deer, red-crowned crane, stellar’s sea eagle and one of the coolest critters on the planet...the japanese macaque monkey! most of japan is covered by countryside – but with more than 100 million people living in such a small place, sadly, wildlife has suffered. although pollution is now tightly controlled, over the years, road building and other human activities have harmed natural habitats. today, about 136 species in japan are listed as endangered. off the coast of this incredible country, the warm tsushima current flows from the south into the sea of japan, where it meets a colder current from the north. as a result of the mixing of different waters, the seas around japan are very rich in fish and other sea life. cool!
| 8247caa277cb3baad998d97858aa457dabceb43b | [
"Markdown",
"C++"
] | 2 | C++ | Fe-Ti/CtrlWorks | a0c60b6e77c7250f647625b17e28d0bd134fd723 | 34bf8c7884ee5caba5feb2a42df3e1fc71043cd8 |
refs/heads/master | <file_sep>#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <boost/bind.hpp>
#include <boost/asio.hpp>
#include <boost/asio/ssl.hpp>
#include <QDebug>
#include <QFile>
// Size, in bytes, of the fixed request/reply buffers used by `client`.
enum {
    max_length = 1024
};
// Minimal asynchronous SSL client: connect -> TLS handshake -> write a fixed
// request -> read the reply. Every completion handler reports failures via
// qDebug instead of throwing.
class client {
public:
    // Kick off an async connect to the first resolved endpoint; on failure,
    // handle_connect retries with the next endpoint from the iterator.
    client(boost::asio::io_service& io_service, boost::asio::ssl::context& context,
        boost::asio::ip::tcp::resolver::iterator endpoint_iterator)
        : socket_(io_service, context) {
        boost::asio::ip::tcp::endpoint endpoint = *endpoint_iterator;
        socket_.lowest_layer().async_connect(endpoint,
            boost::bind(&client::handle_connect, this,
                boost::asio::placeholders::error, ++endpoint_iterator));
    }

    // Passphrase for an encrypted private key.
    // NOTE(review): never registered on the SSL context (no set_password_callback
    // call visible in this file) — appears unused; confirm.
    std::string get_password() const {
        return "<PASSWORD>";
    }

    // Connect completion: start the TLS handshake on success, try the next
    // endpoint on failure, or give up once the endpoint list is exhausted.
    void handle_connect(const boost::system::error_code& error,
        boost::asio::ip::tcp::resolver::iterator endpoint_iterator) {
        if (!error) {
            socket_.async_handshake(boost::asio::ssl::stream_base::client,
                boost::bind(&client::handle_handshake, this,
                    boost::asio::placeholders::error));
        } else if (endpoint_iterator != boost::asio::ip::tcp::resolver::iterator()) {
            socket_.lowest_layer().close();
            boost::asio::ip::tcp::endpoint endpoint = *endpoint_iterator;
            socket_.lowest_layer().async_connect(endpoint,
                boost::bind(&client::handle_connect, this,
                    boost::asio::placeholders::error, ++endpoint_iterator));
        } else {
            qDebug() << "Connect failed: " << error;
        }
    }

    // Handshake completion: send the fixed 5-byte request "Hello".
    void handle_handshake(const boost::system::error_code& error) {
        if (!error) {
            //std::cout << "Enter message: ";
            //std::cin.getline(request_, max_length);
            strcpy(request_, "Hello");
            size_t request_length = 5;
            boost::asio::async_write(socket_,
                boost::asio::buffer(request_, request_length),
                boost::bind(&client::handle_write, this,
                    boost::asio::placeholders::error,
                    boost::asio::placeholders::bytes_transferred));
        } else {
            qDebug() << "Handshake failed: " << error << "\n";
        }
    }

    // Write completion: read back exactly as many bytes as were just written.
    // Assumes the server echoes a reply of the same length — TODO confirm protocol.
    void handle_write(const boost::system::error_code& error,
        size_t bytes_transferred) {
        if (!error) {
            boost::asio::async_read(socket_,
                boost::asio::buffer(reply_, bytes_transferred),
                boost::bind(&client::handle_read, this,
                    boost::asio::placeholders::error,
                    boost::asio::placeholders::bytes_transferred));
        } else {
            qDebug() << "Write failed: " << error << "\n";
        }
    }

    // Read completion: dump the reply buffer to the debug log.
    void handle_read(const boost::system::error_code& error,
        size_t bytes_transferred) {
        if (!error) {
            qDebug() << "Reply: ";
            //std::cout.write(reply_, bytes_transferred);
            qDebug() << reply_;
            qDebug() << "\n";
        } else {
            qDebug() << "Read failed: " << error << "\n";
        }
    }

private:
    // TLS stream layered over a TCP socket.
    boost::asio::ssl::stream<boost::asio::ip::tcp::socket> socket_;
    // Fixed-size request/reply buffers (see max_length).
    char request_[max_length];
    char reply_[max_length];
};
void exportFromResource(const QString &path, const QString &output_path)
{
QFile f(path);
QFile outfile(output_path);
if (!f.open(QIODevice::ReadOnly) || !outfile.open(QIODevice::WriteOnly)) return;
outfile.write(f.readAll());
f.close();
outfile.close();
}
// Entry point: resolve the server address, configure the SSL context from
// certificates bundled as Qt resources, run the SSL client exchange to
// completion, then start the QML UI.
int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    boost::asio::io_service io_service;

    // Resolve the hard-coded server endpoint.
    boost::asio::ip::tcp::resolver resolver(io_service);
    boost::asio::ip::tcp::resolver::query query("192.168.88.2", "4444");
    boost::asio::ip::tcp::resolver::iterator iterator = resolver.resolve(query);

    boost::asio::ssl::context ctx(io_service, boost::asio::ssl::context::sslv23);
    ctx.set_options(
        boost::asio::ssl::context::default_workarounds
        | boost::asio::ssl::context::no_sslv2
        | boost::asio::ssl::context::single_dh_use);
    // Require and verify the peer certificate.
    ctx.set_verify_mode(boost::asio::ssl::context::verify_peer | boost::asio::ssl::context::verify_fail_if_no_peer_cert);

    // Certificates ship inside the Qt resource bundle; OpenSSL needs real
    // files, so each one is first copied to the working directory.
    exportFromResource(":/cert/certs/server.crt", "./server.crt");
    ctx.load_verify_file("./server.crt");
    // NOTE(review): the client presents the server's own certificate and key
    // as its identity — confirm this is the intended mutual-auth setup.
    ctx.use_certificate_chain_file("./server.crt");
    exportFromResource(":/cert/certs/server.key", "./server.key");
    ctx.use_private_key_file("./server.key", boost::asio::ssl::context::pem);
    exportFromResource(":/cert/certs/dh512.pem", "./dh512.pem");
    ctx.use_tmp_dh_file("./dh512.pem");

    // io_service.run() blocks until the whole connect/handshake/echo
    // sequence finishes, i.e. before the UI ever appears.
    client c(io_service, ctx, iterator);
    io_service.run();

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));

    return app.exec();
}
<file_sep>#!/bin/bash
g++ server.cpp -lboost_system -lssl -lcrypto -ldl -lpthread -o server<file_sep>TEMPLATE = app
QT += qml quick
CONFIG += c++11
SOURCES += main.cpp
RESOURCES += qml.qrc
INCLUDEPATH += ../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/include
LIBS += -L../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/armeabi-v7a/lib -lcrypto -lssl
LIBS += -L../libs/Boost-for-Android-Prebuilt/boost_1_53_0/armeabi-v7a/lib -lboost_system-gcc-mt-1_53
INCLUDEPATH += ../libs/Boost-for-Android-Prebuilt/boost_1_53_0/include
# Additional import path used to resolve QML modules in Qt Creator's code model
QML_IMPORT_PATH =
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
contains(ANDROID_TARGET_ARCH,armeabi-v7a) {
ANDROID_EXTRA_LIBS = \
../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/armeabi-v7a/lib/libcrypto.so \
../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/armeabi-v7a/lib/libssl.so \
../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/armeabi-v7a/lib/libcrypto_1_0_0.so \
../libs/OpenSSL-for-Android-Prebuilt/openssl-1.0.2/armeabi-v7a/lib/libssl_1_0_0.so
}
#DISTFILES += \
# certs/AndroidManifest.xml \
# certs/gradle/wrapper/gradle-wrapper.jar \
# certs/gradlew \
# certs/res/values/libs.xml \
# certs/build.gradle \
# certs/gradle/wrapper/gradle-wrapper.properties \
# certs/gradlew.bat
#ANDROID_PACKAGE_SOURCE_DIR = $$PWD/certs
| 8353e6b83f11c7280f82d2c6d9b4f93e838d6a3d | [
"INI",
"C++",
"Shell"
] | 3 | C++ | zloiia/android_boost_mutual_auth_proof | fd53d5c2b89c202fdc7ea42a289778bbaccfdb4d | 623ae0ae3ba53c1e5b1d3733aa192b2792cb1129 |
refs/heads/master | <repo_name>DanDev82/Random-Quote-Generator<file_sep>/js/index.js
// Event listener to respond to "Show another quote" button clicks:
// any click anywhere on the button invokes printQuote.
document.getElementById('loadQuote').addEventListener("click", printQuote, false);

// Globally accessible state.
// Holds the HTML string most recently built/rendered by printQuote.
var output = '';

// Quote pool; `citation`, `year` and `tag` are optional on some entries.
var quotes = [
  {quote: 'You can\'t always get what you want.', source: 'The Rolling Stones', citation: 'Let It Bleed', year: 1969, tag: 'Music'},
  {quote: 'Would I rather be feared, or loved? Easy. Both. I want people to be afraid of how much they love me.', source: '<NAME>', citation: 'The Office', year: 2005, tag: 'Humor'},
  {quote: 'Stay thirsty my friends.', source: 'The Most Interesting Man in the World', citation: 'Dos Equis', year: 2006},
  {quote: 'I\'m gonna make him an offer he can\'t refuse.', source: '<NAME>', citation: 'The Godfather', year: 1972, tag: 'Movies'},
  {quote: 'Frankly, my dear, I don\'t give a damn.', source: 'Rhett Butler', citation: 'Gone With The Wind', year: 1939, tag: 'Movies'},
  {quote: 'Go ahead, make my day.', source: '<NAME>', citation: 'Sudden Impact', year: 1983, tag: 'Movies'},
  {quote: 'E.T. phone home.', source: 'E.T.', citation: 'E.T.', year: 1982, tag: 'Movies'},
  {quote: 'I\'ll have what she\'s having.', source: '<NAME>', citation: 'When Harry Met Sally', year: 1989, tag: 'Movies'},
  {quote: 'You don\'t need to change the world. Just change yours.', source: '<NAME>', citation: 'Himself', year: 2016, tag: 'Lifestyle'}
];
//Prints getRandomQuote function
// Pick a random quote and render it into the #quote-box element.
// Bug fix: the original wrote citation/year spans unconditionally into
// innerHTML, then appended the conditional citation/year/tag markup to the
// `output` variable without ever writing it back to the DOM (dead code,
// plus duplicated spans). Now the markup is built once, conditionally,
// and assigned to innerHTML in a single write.
function printQuote() {
  var randomQuote = getRandomQuote();

  var html = '<p class="quote">' + randomQuote.quote + '</p>' +
             '<p class="source">' + randomQuote.source;

  // Optional fields: only emit spans for properties the quote defines.
  if (randomQuote.citation) {
    html += '<span class="citation">' + randomQuote.citation + '</span>';
  }
  if (randomQuote.year) {
    html += '<span class="year">' + randomQuote.year + '</span>';
  }
  if (randomQuote.tag) {
    html += '<span class="tag">' + randomQuote.tag + '</span>';
  }
  html += '</p>';

  // Keep the module-level `output` in sync with what is displayed.
  output = document.getElementById('quote-box').innerHTML = html;
}
//Gets a random object to be called by printQuote
// Return a randomly chosen entry from the module-level `quotes` array,
// for use by printQuote.
function getRandomQuote() {
  var index = Math.floor(Math.random() * quotes.length);
  return quotes[index];
}
A Pen created at CodePen.io. You can find this one at https://codepen.io/DanDev/pen/yoYmWv.
| 67658ff9193c4786a4e522b5631fd4f921b90508 | [
"JavaScript",
"Text"
] | 2 | JavaScript | DanDev82/Random-Quote-Generator | d1868bf2ed53ae571fb42d25795e79d6a554d322 | 88f1089efd48a8f2c195a100efa666a2eda8231b |
refs/heads/master | <file_sep>package jp.techacademy.naoto.ichihashi.taskapp
import io.realm.RealmObject
import io.realm.annotations.PrimaryKey
import java.io.Serializable
import java.util.*
// Realm entity for a single task record.
// Serializable so instances can be passed across component boundaries.
open class Task:RealmObject(),Serializable {
    // Free-form category label, used for spinner-based filtering.
    var category:String=""

    // Task title shown in the list.
    var title:String = ""

    // Task body text.
    var contents:String =""

    // Due date/time; an alarm is scheduled for this instant.
    var date: Date = Date()

    // Unique id; assigned sequentially as (max existing id + 1).
    @PrimaryKey
    var id : Int = 0
}
import android.app.AlarmManager
import android.app.PendingIntent
import android.content.Intent
import android.os.Bundle
import android.support.v7.app.AlertDialog
import android.support.v7.app.AppCompatActivity
import android.util.Log
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.Spinner
import io.realm.Realm
import io.realm.RealmChangeListener
import io.realm.Sort
import kotlinx.android.synthetic.main.activity_main.*
import java.io.Serializable
import java.util.*
const val EXTRA_TASK = "jp.techacademy.naoto.ichihashi.taskapp.TASK"
class MainActivity : AppCompatActivity() {

    // Realm instance kept open for the lifetime of this activity.
    private lateinit var mRealm: Realm

    // Category names gathered from all tasks; a Set avoids duplicate entries.
    private var category_list = mutableSetOf<String?>()

    // Refresh the task list whenever the Realm database changes.
    private val mRealmListener = object : RealmChangeListener<Realm> {
        override fun onChange(element: Realm) {
            reloadListView()
        }
    }

    // Adapter backing the task ListView.
    private lateinit var mTaskAdapter: TaskAdapter

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Floating action button: open InputActivity to create a new task.
        fab.setOnClickListener { view ->
            val intent = Intent(this@MainActivity, InputActivity::class.java)
            // Wrap category_list in a Serializable DataState so it can travel in the Intent.
            val state = DataState(category_list.toMutableList())
            // Attach the state to the Intent under KEY.
            intent.putExtra(InputActivity.KEY,state)
            startActivity(intent)
        }

        // Obtain the Realm instance.
        mRealm = Realm.getDefaultInstance()
        // Register the change listener on the Realm.
        mRealm.addChangeListener(mRealmListener)

        // Set up the ListView adapter.
        mTaskAdapter = TaskAdapter(this@MainActivity)

        // Tapping a row opens that task for editing.
        listView1.setOnItemClickListener { parent, view, position, id ->
            // Task at the tapped position.
            val task = parent.adapter.getItem(position) as Task
            // Intent for InputActivity.
            val intent = Intent(this@MainActivity, InputActivity::class.java)
            // Pass the task id as EXTRA_TASK.
            intent.putExtra(EXTRA_TASK, task.id)
            // Wrap category_list in a Serializable DataState so it can travel in the Intent.
            val state = DataState(category_list.toMutableList())
            // Attach the state to the Intent under KEY.
            intent.putExtra(InputActivity.KEY,state)
            startActivity(intent)
        }

        // Long-pressing a row deletes the selected task (after confirmation).
        listView1.setOnItemLongClickListener { parent, _, position, _ ->
            // Task object at the pressed position.
            val task = parent.adapter.getItem(position) as Task

            // Build the confirmation dialog.
            val builder = AlertDialog.Builder(this@MainActivity)

            builder.setTitle("削除")
            builder.setMessage(task.title + "を削除しますか")

            builder.setPositiveButton("OK") { _, _ ->
                val results = mRealm.where(Task::class.java).equalTo("id", task.id).findAll()

                mRealm.beginTransaction()
                results.deleteAllFromRealm()
                mRealm.commitTransaction()

                // Cancel the alarm that was scheduled for this task.
                val resultIntent = Intent(applicationContext, TaskAlarmReceiver::class.java)
                val resultPendingIntent = PendingIntent.getBroadcast(this@MainActivity,task.id,resultIntent,PendingIntent.FLAG_UPDATE_CURRENT)
                val alarmManager = getSystemService(ALARM_SERVICE) as AlarmManager
                alarmManager.cancel(resultPendingIntent)

                reloadListView()
                // Rebuild category_list.
                categoryList()
            }

            // Do nothing on CANCEL.
            builder.setNegativeButton("CANCEL", null)

            // Create and show the dialog.
            val dialog = builder.create()
            dialog.show()

            true
        }

        // "Delete all" button handler.
        // Letting the user purge everything occasionally keeps id values from
        // growing without bound; note that category_list is wiped as well, so
        // categories must be re-created afterwards. (Categories would arguably
        // be better kept in their own Realm class.)
        delete_all_button.setOnClickListener{ view ->
            val builder2 = AlertDialog.Builder(this@MainActivity)

            builder2.setTitle("全件削除")
            builder2.setMessage("実行してよろしいですか?")

            builder2.setPositiveButton("OK") { _, _ ->
                // Delete every Task; must be wrapped in begin/commitTransaction.
                mRealm.beginTransaction()
                mRealm.deleteAll()
                mRealm.commitTransaction()

                reloadListView()
                // Rebuild category_list.
                categoryList()
            }

            // Do nothing on CANCEL.
            builder2.setNegativeButton("CANCEL", null)

            // Create and show the dialog.
            val dialog = builder2.create()
            dialog.show()

            // NOTE(review): stray expression — OnClickListener returns Unit,
            // so this `true` has no effect.
            true
        }

        // "Show all" button: reload the unfiltered task list.
        category_all_button.setOnClickListener{
            reloadListView()
        }

        // spinner_button (re)builds the category spinner and opens it at once.
        // A Spinner auto-selects its first element the moment it is created,
        // which would trigger an unintended selection, so the spinner is only
        // created on user demand. The spinner widget itself is hidden in the
        // layout; only its drop-down list is visible.
        spinner_button.setOnClickListener{ view ->
            // Rebuild category_list.
            categoryList()
            // Build the spinner.
            spinnerCreate()
            // Programmatically click the spinner to show the drop-down menu.
            spinner0.performClick()
        }
    }

    // Refresh category_list and the ListView every time MainActivity
    // (re)starts, so the list handed over via Intent is never stale.
    override fun onStart() {
        super.onStart()
        categoryList()
        reloadListView()
    }

    private fun reloadListView() {
        // Fetch all tasks from Realm, newest first.
        val taskRealmResults =
            mRealm.where(Task::class.java).findAll().sort("date", Sort.DESCENDING)

        // Copy the results before handing them to the adapter
        // (Realm-managed objects are copied first when used in an Adapter).
        mTaskAdapter.taskList = mRealm.copyFromRealm(taskRealmResults)

        // Hand the copied data to the ListView adapter.
        listView1.adapter = mTaskAdapter

        // Notify the adapter so the view refreshes.
        mTaskAdapter.notifyDataSetChanged()
    }

    // Insert a fixed sample task (test helper).
    private fun addTaskForTest() {
        val task = Task()
        task.category = "サンプル"
        task.title = "サンプル"
        task.contents = "サンプル"
        task.date = Date()
        task.id = 0

        mRealm.beginTransaction()
        mRealm.copyToRealmOrUpdate(task)
        mRealm.commitTransaction()
    }

    private fun categoryList() {
        category_list.clear()

        // Fetch all Realm data.
        var results2 = mRealm.where(Task::class.java).findAll()

        // Skip entirely when there is no data yet (e.g. right after install).
        if(results2.count() > 0){
            category_list.clear()
            // Highest id currently stored in Realm.
            var identifier2: Int = results2.max("id")!!.toInt()
            Log.d("CAT", identifier2.toString())

            var i = 0
            // Walk ids 0..max and collect each task's category into category_list.
            do {
                // First task whose id equals i (null if that id was deleted).
                var results3 = mRealm.where(Task::class.java).equalTo("id", i).findFirst()
                // Since InputActivity assigns ids sequentially, deleted ids
                // yield null entries here; they are stripped below.
                // NOTE: scanning every id is O(max id) and may get slow with
                // a large amount of data.
                category_list.add(results3?.category)
                i = i + 1
            } while (i <= identifier2)

            // Log the list before nulls are removed.
            Log.d("CAT", "null削除前" + category_list.toString())
        } else {
            return
        }

        // Strip all nulls in one go so the spinner adapter never sees them.
        // Collections.singleton(): immutable single-element set — here a set
        // containing only null.
        category_list.removeAll(Collections.singleton(null))
        Log.d("CAT", "null削除後" + category_list.toString())
    }

    private fun spinnerCreate(){
        // Spinner adapter using the custom layout custom_spinner over category_list.
        var adapter = ArrayAdapter(applicationContext, R.layout.custom_spinner,category_list.toList())
        adapter.setDropDownViewResource(R.layout.custom_spinner_dropdown)
        spinner0.adapter = adapter

        spinner0.onItemSelectedListener = object : AdapterView.OnItemSelectedListener {
            override fun onItemSelected(parent: AdapterView<*>?, view: View?,position: Int, id: Long) {
                val spinnerParent = parent as Spinner
                // Text of the selected category.
                val item = spinnerParent.selectedItem as String

                // Query Realm for tasks whose category equals the selection.
                val taskRealmResults =
                    mRealm.where(Task::class.java).equalTo("category", item).findAll()

                // Copy before handing to the adapter (same rule as reloadListView).
                mTaskAdapter.taskList = mRealm.copyFromRealm(taskRealmResults)

                // Hand the copied data to the ListView adapter.
                listView1.adapter = mTaskAdapter

                // Notify the adapter so the view refreshes.
                mTaskAdapter.notifyDataSetChanged()
            }

            override fun onNothingSelected(parent: AdapterView<*>?) {
            }
        }
    }

    // Serializable wrapper so category_list can travel through an Intent extra.
    data class DataState(
        var list :MutableList<String?>
    ):Serializable

    override fun onDestroy() {
        super.onDestroy()

        mRealm.close()
    }
}
//// カテゴリ検索ボタンを押した時の処理
// category_search_button.setOnClickListener { view ->
// val category_word = category_edit_text.text.toString()
//
// if (category_word == "") {
// val toast = Toast.makeText(this, R.string.msg, Toast.LENGTH_SHORT).show()
// } else {
//// Realmデータベースからcategory==category_wordとなるデータを取得
// val taskRealmResults =
// mRealm.where(Task::class.java).equalTo("category", category_word).findAll()
//// 上記の結果をTaskListとしてコピーする
//// Realmのデータベースから取得したデータをAdapterで使う場合は一旦コピーしてから渡す
// mTaskAdapter.taskList = mRealm.copyFromRealm(taskRealmResults)
//// TaskのListView用のアダプタにコピーしたデータをデータを渡す
// listView1.adapter = mTaskAdapter
//// 表示を更新するために、アダプターにデータが変更されたことを知らせる
// mTaskAdapter.notifyDataSetChanged()
// }
// }
<file_sep>package jp.techacademy.naoto.ichihashi.taskapp
import android.app.AlarmManager
import android.app.DatePickerDialog
import android.app.PendingIntent
import android.app.TimePickerDialog
import android.content.Intent
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.support.v7.widget.Toolbar
import android.util.Log
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.Spinner
import io.realm.Realm
import kotlinx.android.synthetic.main.activity_main.*
import kotlinx.android.synthetic.main.content_input.*
import java.util.*
class InputActivity : AppCompatActivity() {

    // Key under which MainActivity passes the Serializable category list.
    companion object{
        val KEY = "key"
    }

    // Currently selected due date/time components.
    private var mYear = 0
    private var mMonth = 0
    private var mDay = 0
    private var mHour = 0
    private var mMinute = 0
    // Task being edited; null while creating a new one.
    private var mTask : Task ? = null

    // Date button: show a DatePickerDialog and reflect the pick on the button label.
    private val mOnDateClickListener = View.OnClickListener{
        val datePickerDialog = DatePickerDialog(this,
            DatePickerDialog.OnDateSetListener{_,year,month,dayOfMonth ->
                mYear = year
                mMonth = month
                mDay = dayOfMonth
                val dateString = mYear.toString() + "/" + String.format("%02d",mMonth + 1)+"/" + String.format("%02d",mDay)
                date_button.text = dateString},mYear,mMonth,mDay)
        datePickerDialog.show()
    }

    // Time button: show a TimePickerDialog and reflect the pick on the button label.
    private val mOnTimeClickListener = View.OnClickListener{
        val timePickerDialog = TimePickerDialog(this,
            TimePickerDialog.OnTimeSetListener{_,hour,minute ->
                mHour = hour
                mMinute = minute
                val timeString = String.format("%02d",mHour) + ":" + String.format("%02d",mMinute)
                times_button.text = timeString
            },mHour,mMinute,true)
        timePickerDialog.show()
    }

    // Done button: persist the task and close this screen.
    private val mOnDoneClickListener = View.OnClickListener{
        addTask()
        finish()
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_input)

        // Action bar setup.
        val toolbar = findViewById<View>(R.id.toolbar) as Toolbar
        setSupportActionBar(toolbar)
        if(supportActionBar != null){
            supportActionBar!!.setDisplayHomeAsUpEnabled(true)
        }

        // Wire up the UI widgets.
        date_button.setOnClickListener(mOnDateClickListener)
        times_button.setOnClickListener(mOnTimeClickListener)
        done_button.setOnClickListener(mOnDoneClickListener)

        // EXTRA_TASK is -1 (no match) when creating a new entry.
        val intent = intent
        val taskId = intent.getIntExtra(EXTRA_TASK,-1)
        val realm = Realm.getDefaultInstance()
        mTask = realm.where(Task::class.java).equalTo("id",taskId).findFirst()
        realm.close()

        if(mTask == null){
            // New task: default the pickers to "now".
            val calendar = Calendar.getInstance()
            mYear = calendar.get(Calendar.YEAR)
            mMonth = calendar.get(Calendar.MONTH)
            mDay = calendar.get(Calendar.DAY_OF_MONTH)
            mHour = calendar.get(Calendar.HOUR_OF_DAY)
            mMinute = calendar.get(Calendar.MINUTE)
        }else{
            // Editing: pre-fill the EditTexts with the values already stored on mTask.
            category_edit_text.setText(mTask!!.category)
            title_edit_text.setText(mTask!!.title)
            content_edit_text.setText(mTask!!.contents)

            val calendar = Calendar.getInstance()
            calendar.time = mTask!!.date
            mYear = calendar.get(Calendar.YEAR)
            mMonth = calendar.get(Calendar.MONTH)
            mDay = calendar.get(Calendar.DAY_OF_MONTH)
            mHour = calendar.get(Calendar.HOUR_OF_DAY)
            mMinute = calendar.get(Calendar.MINUTE)

            val dateString = mYear.toString() + "/" + String.format("%02d",mMonth + 1) + "/" + String.format("%02d",mDay)
            val timeString = String.format("%02d",mHour) + ":" + String.format("%02d",mMinute)

            date_button.text = dateString
            times_button.text = timeString
        }

        // Build the category spinner on demand and open it immediately.
        spinner_button1.setOnClickListener{view ->
            spinnerCreate()
            spinner1.performClick()
        }
    }

    private fun addTask(){
        // Acquire the Realm instance.
        val realm = Realm.getDefaultInstance()

        // Begin the write transaction.
        realm.beginTransaction()

        if(mTask == null){
            // New task: create it and assign the next free id.
            mTask = Task()

            val taskRealmResults = realm.where(Task::class.java).findAll()

            val identifier:Int =
                if (taskRealmResults.max("id") != null){
                    // Existing tasks: use max id + 1.
                    taskRealmResults.max("id")!!.toInt() + 1
                }else{
                    // No tasks stored yet (max returns null): start at 0.
                    0
                }
            mTask!!.id = identifier
        }

        // Read the user's input as strings.
        val category = category_edit_text.text.toString()
        val title = title_edit_text.text.toString()
        val content = content_edit_text.text.toString()

        // Store the input on mTask.
        mTask!!.category = category
        mTask!!.title = title
        mTask!!.contents = content
        // Store the due date on mTask.
        val calendar = GregorianCalendar(mYear,mMonth,mDay,mHour,mMinute)
        val date = calendar.time
        mTask!!.date = date

        // Upsert into Realm.
        realm.copyToRealmOrUpdate(mTask!!)
        // Commit the transaction.
        realm.commitTransaction()
        // Close Realm.
        realm.close()

        // Intent that will fire TaskAlarmReceiver at the due time.
        val resultIntent = Intent(applicationContext,TaskAlarmReceiver::class.java)
        // Carry the task id as EXTRA_TASK.
        resultIntent.putExtra(EXTRA_TASK,mTask!!.id)
        // PendingIntent keyed by the task id so each task has its own alarm.
        val resultPendingIntent = PendingIntent.getBroadcast(
            this,
            mTask!!.id,
            resultIntent,
            PendingIntent.FLAG_UPDATE_CURRENT
        )

        // Schedule the alarm.
        val alarmManager = getSystemService(ALARM_SERVICE) as AlarmManager
        alarmManager.set(AlarmManager.RTC_WAKEUP,calendar.timeInMillis,resultPendingIntent)
    }

    private fun spinnerCreate(){
        // Retrieve the Serializable state MainActivity attached under KEY.
        val state = intent.getSerializableExtra(KEY)

        // All use of state lives inside this check.
        // NOTE(review): removing this `if` reportedly caused an error (reason
        // unknown as of 2019-12-29) — the smart cast to DataState requires it.
        if(state is MainActivity.DataState){
            // Log the received list for debugging.
            Log.d("CAT",state.list.toString())
            // Adapter over the received list; .list gives the MutableList,
            // .toList() converts it to a List for the adapter.
            var adapter = ArrayAdapter(applicationContext, R.layout.custom_spinner,state.list.toList())
            // Custom layouts for the spinner and its drop-down.
            adapter.setDropDownViewResource(R.layout.custom_spinner_dropdown)
            spinner1.adapter = adapter

            spinner1.onItemSelectedListener = object : AdapterView.OnItemSelectedListener {
                override fun onItemSelected(parent: AdapterView<*>?,view: View?,position: Int,id: Long) {
                    val spinnerParent = parent as Spinner
                    // Selected category text.
                    val item = spinnerParent.selectedItem as String
                    // Copy the selection into the category EditText.
                    category_edit_text.setText(item)
                }

                override fun onNothingSelected(parent: AdapterView<*>?) {
                }
            }
        }
    }
}
}
"Kotlin"
] | 3 | Kotlin | NaotoIchihashi/TaskApp | 773441e564787499ba65d86528e6bcdb7bfcafe1 | 1de90843511749a11dfe4a10dd8777d9a496e2d3 |
refs/heads/master | <repo_name>unidevop/nxvsar<file_sep>/src/VsarInit/src/vsar/Vsar_Project.cxx
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <Vsar_Project.hxx>
//#include <uf.h>
//#include <uf_clone.h>
//#include <uf_part.h>
#include <algorithm>
//#include <boost/shared_ptr.hpp>
#include <boost/filesystem.hpp>
#include <boost/bind.hpp>
#include <NXOpen/Session.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/PartCollection.hxx>
#include <NXOpen/CAE_SimPart.hxx>
#include <NXOpen/CAE_FemPart.hxx>
#include <NXOpen/CAE_AssyFemPart.hxx>
#include <NXOpen/CAE_FTK_DataManager.hxx>
#include <NXOpen/ListingWindow.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/MenuBar_MenuBarManager.hxx>
#include <NXOpen/MenuBar_MenuButton.hxx>
#include <Vsar_Init_Names.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace boost;
using namespace NXOpen;
using namespace NXOpen::CAE;
using namespace NXOpen::MenuBar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
boost::scoped_ptr<Project> Project::m_prjInstance;
Status Project::m_prjStatus;
    // Status tracks the project lifecycle state and drives menu sensitivity.
    Status::Status()
    {
    }

    Status::~Status()
    {
    }
    // Per-menu-item sensitivity table: each entry maps a menu button name to
    // the bitmask of project states in which that button is enabled.
    // A mask of 0 is special-cased: only the brace menu item matching the
    // current project type is enabled (see Status::SetMenuItemSensitivity).
    static Status::MenuItemSensitivity s_menuItemSens[] =
    {
        {MENU_ITEM_NAME_NEW_PROJECT,    Status::ProjectStatus_None | Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved },
        {MENU_ITEM_NAME_SET_TRAIN,      Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SET_RAIL,       Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SET_SLAB,       Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SET_BRACE,      Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SET_BRIDGE,     0},
        {MENU_ITEM_NAME_SET_BASE,       0},
        {MENU_ITEM_NAME_SET_TUNNEL,     0},
        {MENU_ITEM_NAME_EXECUTE_SOLVE,  Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SOLVE_RESPONSE, Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_SOLVE_NOISE,    Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved},
        {MENU_ITEM_NAME_LOAD_RESULT,    Status::ProjectStatus_Defined | Status::ProjectStatus_ResponseSolved | Status::ProjectStatus_ResponseNoiseSolved | Status::ProjectStatus_NoiseSolved}
    };
    // Enable/disable one menu button according to the given project status.
    // Entries whose status mask is 0 are special-cased: only the brace menu
    // item matching the active project type is made sensitive.
    void Status::SetMenuItemSensitivity(const MenuItemSensitivity &menuItemData, ProjectStatus status)
    {
        try
        {
            MenuButton *pMenuButton = UI::GetUI()->MenuBarManager()->GetButtonFromName(menuItemData.m_buttonName.c_str());

            if (pMenuButton)
            {
                MenuButton::SensitivityStatus sens;

                if (menuItemData.m_status)
                {
                    // Regular entry: sensitive iff the current status bit is in the mask.
                    sens = (menuItemData.m_status & status) ?
                        MenuButton::SensitivityStatusSensitive : MenuButton::SensitivityStatusInsensitive;
                }
                else
                {
                    // Mask of 0: enable only the brace item of the active project type.
                    sens = (menuItemData.m_buttonName == Project::Instance()->GetProperty()->GetBraceMenuItemName()) ?
                        MenuButton::SensitivityStatusSensitive : MenuButton::SensitivityStatusInsensitive;
                }

                pMenuButton->SetButtonSensitivity(sens);
            }
        }
        catch (std::exception &)
        {
            // Best-effort: lookup/update failures are silently ignored.
            // NOTE(review): confirm swallowing all exceptions here is intended.
        }
    }
    // Move the project into `status`: update every menu item's sensitivity
    // and persist the new status as a string attribute on the SIM part.
    void Status::Switch(ProjectStatus status)
    {
        std::for_each(s_menuItemSens, s_menuItemSens + N_ELEMENTS(s_menuItemSens),
            boost::bind(&Status::SetMenuItemSensitivity, this, _1, status));

        // Map the enum value to its persisted attribute string;
        // ProjectStatus_None and unknown values store nothing.
        std::string prjStatusVal;
        switch (status)
        {
        case ProjectStatus_Defined:
            {
                prjStatusVal = ATTRIBUTE_PROJECT_STATUS_DEFINED;
                break;
            }
        case ProjectStatus_ResponseSolved:
            {
                prjStatusVal = ATTRIBUTE_PROJECT_STATUS_RESPONSE_SOLVED;
                break;
            }
        case ProjectStatus_ResponseNoiseSolved:
            {
                prjStatusVal = ATTRIBUTE_PROJECT_STATUS_RESPONSE_NOISE_SOLVED;
                break;
            }
        case ProjectStatus_NoiseSolved:
            {
                prjStatusVal = ATTRIBUTE_PROJECT_STATUS_NOISE_SOLVED;
                break;
            }
        default:
            break;
        }

        if (!prjStatusVal.empty())
        {
            // Defer the attribute update (OptionLater) rather than forcing one now.
            Project::Instance()->GetProperty()->GetSimPart()->SetAttribute(ATTRIBUTE_PROJECT_STATUS,
                prjStatusVal.c_str(), Update::OptionLater);
        }
    }
    // BaseProjectProperty wraps the project's SIM part and exposes the
    // project's name, paths and component parts.
    BaseProjectProperty::BaseProjectProperty(NXOpen::CAE::SimPart* pSimPrt) : m_simPrt(pSimPrt)
    {
    }

    BaseProjectProperty::~BaseProjectProperty()
    {
    }
    // Project name, read from the project-name string attribute on the SIM part.
    std::string BaseProjectProperty::GetProjectName() const
    {
        return m_simPrt->GetStringAttribute(ATTRIBUTE_PROJECT_NAME).GetUTF8Text();
    }
std::string BaseProjectProperty::GetProjectPath() const
{
boost::filesystem::path fullPath(m_simPrt->FullPath().GetUTF8Text());
return fullPath.parent_path().string();
}
AssyFemPart* BaseProjectProperty::GetAFemPart() const
{
BasePart *pAfemPart = m_simPrt->FemPart();
// Load afem part
if (pAfemPart == NULL)
{
std::string prtName(GetProjectName().append("_a.afm"));
OpenCompPart(prtName);
}
return dynamic_cast<AssyFemPart*>(pAfemPart);
}
BasePart* BaseProjectProperty::OpenCompPart( const std::string & prtName ) const
{
namespace bfs = boost::filesystem;
PartCollection *pPrtCol = Session::GetSession()->Parts();
BasePart *pCompPrt = NULL;
// Get template sim path name
const bfs::path prjPath(GetProjectPath());
bfs::path compPathName(prjPath / prtName);
PartLoadStatus *pPrtLoadStatus = NULL;
DELETE_CLASS_POINTER(pPrtLoadStatus);
pCompPrt = pPrtCol->OpenBase(compPathName.string().c_str(), &pPrtLoadStatus);
return pCompPrt;
}
    // Find a component part by name in the current session; if it is not
    // loaded yet (FindObject throws), fall back to opening it from the
    // project directory.
    BasePart* BaseProjectProperty::GetCompPart(const std::string &prtName) const
    {
        BasePart *pCompPrt = NULL;

        try
        {
            PartCollection *pPrtCol = Session::GetSession()->Parts();

            pCompPrt = pPrtCol->FindObject(prtName.c_str());
        }
        catch(std::exception&)
        {
            // Not in session yet — load it from disk.
            pCompPrt = OpenCompPart(prtName);
        }

        return pCompPrt;
    }
FemPart* BaseProjectProperty::GetRailSlabFemPart() const
{
std::string femName(std::string(RAIL_SLAB_FEM_BASE_NAME).append("_f.fem"));
BasePart *pFemPrt = GetCompPart(femName);
return dynamic_cast<FemPart*>(pFemPrt);
}
Part* BaseProjectProperty::GetRailSlabIdeaPart() const
{
std::string prtName(std::string(RAIL_SLAB_FEM_BASE_NAME).append("_f_i.prt"));
BasePart *pPrt = GetCompPart(prtName);
return dynamic_cast<Part*>(pPrt);
}
FemPart* BaseProjectProperty::GetBraceFemPart() const
{
std::string femName(GetBraceTemplateBaseName().append("_f.fem"));
BasePart *pFemPrt = GetCompPart(femName);
return dynamic_cast<FemPart*>(pFemPrt);
}
Part* BaseProjectProperty::GetBraceIdeaPart() const
{
std::string prtName(GetBraceTemplateBaseName().append("_f_i.prt"));
BasePart *pPrt = GetCompPart(prtName);
return dynamic_cast<Part*>(pPrt);
}
Part* BaseProjectProperty::GetGeometryPart() const
{
BasePart *pPrt = GetCompPart(GetRootPartName());
return dynamic_cast<Part*>(pPrt);
}
int BaseProjectProperty::GetTemplatePartFiles(std::vector<std::string> &fileNames) const
{
fileNames.clear();
const char *commonParts[] = {"base.prt", "beam.prt", "bridge.prt", "carriage.prt",
"rail-transit.prt", "rail.prt", "rails.prt", "slab.prt",
"slabs.prt", "train.prt", "tunnel.prt"};
int eleCnt = sizeof(commonParts) / sizeof(commonParts[0]);
fileNames.insert(fileNames.begin(), commonParts, commonParts + eleCnt);
return static_cast<int>(fileNames.size());
}
std::string BaseProjectProperty::GetRootPartName() const
{
return ROOT_PART_NAME;
}
    // Properties of a bridge-type project: its type tag, persisted type name,
    // template base names and the menu item used for its brace structure.
    BridgeProperty::BridgeProperty(NXOpen::CAE::SimPart* pSimPrt)
        : BaseProjectProperty(pSimPrt)
    {
    }

    BridgeProperty::~BridgeProperty()
    {
    }

    // Project type tag.
    Project::ProjectType BridgeProperty::GetProjectType() const
    {
        return Project::ProjectType_Bridge;
    }

    // Persisted project-type attribute value.
    std::string BridgeProperty::GetProjectTypeName() const
    {
        return PROJECT_TYPE_NAME_BRIDGE;
    }

    // Base name of the project template files.
    std::string BridgeProperty::GetTemplateBaseName() const
    {
        return TEMPLATE_BASE_NAME_BRIDGE;
    }

    // Base name of the brace (bridge) template files.
    std::string BridgeProperty::GetBraceTemplateBaseName() const
    {
        return TEMPLATE_BRACE_BASE_NAME_BRIDGE;
    }

    // Menu item that edits this project type's brace structure.
    std::string BridgeProperty::GetBraceMenuItemName() const
    {
        return MENU_ITEM_NAME_SET_BRIDGE;
    }
    // Properties of a semi-infinite-base-type project: its type tag,
    // persisted type name, template base names and brace menu item.
    SelmiInfiniteBaseProperty::SelmiInfiniteBaseProperty(NXOpen::CAE::SimPart* pSimPrt)
        : BaseProjectProperty(pSimPrt)
    {
    }

    SelmiInfiniteBaseProperty::~SelmiInfiniteBaseProperty()
    {
    }

    // Project type tag.
    Project::ProjectType SelmiInfiniteBaseProperty::GetProjectType() const
    {
        return Project::ProjectType_Selmi_Infinite;
    }

    // Persisted project-type attribute value.
    std::string SelmiInfiniteBaseProperty::GetProjectTypeName() const
    {
        return PROJECT_TYPE_NAME_SELMI_INFINITE;
    }

    // Base name of the brace (base) template files.
    std::string SelmiInfiniteBaseProperty::GetBraceTemplateBaseName() const
    {
        return TEMPLATE_BRACE_BASE_NAME_SELMI_INFINITE;
    }

    // Menu item that edits this project type's brace structure.
    std::string SelmiInfiniteBaseProperty::GetBraceMenuItemName() const
    {
        return MENU_ITEM_NAME_SET_BASE;
    }

    // Base name of the project template files.
    std::string SelmiInfiniteBaseProperty::GetTemplateBaseName() const
    {
        return TEMPLATE_BASE_NAME_SELMI_INFINITE;
    }
    // Properties of a tunnel-type project: its type tag, persisted type name,
    // template base names and the menu item used for its brace structure.
    TunnelProperty::TunnelProperty(NXOpen::CAE::SimPart* pSimPrt)
        : BaseProjectProperty(pSimPrt)
    {
    }

    TunnelProperty::~TunnelProperty()
    {
    }

    // Project type tag.
    Project::ProjectType TunnelProperty::GetProjectType() const
    {
        return Project::ProjectType_Tunnel;
    }

    // Persisted project-type attribute value.
    std::string TunnelProperty::GetProjectTypeName() const
    {
        return PROJECT_TYPE_NAME_TUNNEL;
    }

    // Base name of the project template files.
    std::string TunnelProperty::GetTemplateBaseName() const
    {
        return TEMPLATE_BASE_NAME_TUNNEL;
    }

    // Base name of the brace (tunnel) template files.
    std::string TunnelProperty::GetBraceTemplateBaseName() const
    {
        return TEMPLATE_BRACE_BASE_NAME_TUNNEL;
    }

    // Menu item that edits this project type's brace structure.
    std::string TunnelProperty::GetBraceMenuItemName() const
    {
        return MENU_ITEM_NAME_SET_TUNNEL;
    }
// Create the single in-session project instance and switch the project
// status to "defined". Throws if a project already exists — only one
// project may live in the NX session at a time.
void Project::New(const std::string &prjName, const std::string &prjPath, ProjectType prjType)
{
    if (m_prjInstance.get())
    {
        throw NXException::Create(MSGTXT("Failed to create a new vsdane project. Only one project could exist in the session."));
    }
    m_prjInstance.reset(new Project(prjName, prjPath, prjType));
    m_prjStatus.Switch(Status::ProjectStatus_Defined);
}
// Determine the project type of an opened part from the project-name and
// project-type string attributes stored on its sim part.
// Throws NXException when the part carries no recognizable project marks.
Project::ProjectType Project::GetProjectTypeOfPart(BasePart* pOpenedPrt)
{
    CAE::SimPart *pSimPart = dynamic_cast<CAE::SimPart*>(pOpenedPrt);
    if (pSimPart)
    {
        std::string attrName(pSimPart->GetStringAttribute(ATTRIBUTE_PROJECT_NAME).GetUTF8Text());
        std::string attrType(pSimPart->GetStringAttribute(ATTRIBUTE_PROJECT_TYPE).GetUTF8Text());

        // Both attributes must be present for the part to qualify.
        if (!attrName.empty() && !attrType.empty())
        {
            if (attrType == PROJECT_TYPE_NAME_BRIDGE)
                return Project::ProjectType_Bridge;
            if (attrType == PROJECT_TYPE_NAME_SELMI_INFINITE)
                return Project::ProjectType_Selmi_Infinite;
            if (attrType == PROJECT_TYPE_NAME_TUNNEL)
                return Project::ProjectType_Tunnel;
        }
    }

    // Not a sim part, attributes missing, or unknown type name.
    throw NXException::Create(MSGTXT("The part is not a vsar project part."));
}
// Determine the persisted project status of an opened part from the
// project-name and project-status string attributes on its sim part.
// Throws NXException when the part carries no recognizable status mark.
Status::ProjectStatus Project::GetProjectStatusOfPart(BasePart* pOpenedPrt)
{
    CAE::SimPart *pSimPart = dynamic_cast<CAE::SimPart*>(pOpenedPrt);
    if (pSimPart)
    {
        std::string attrName(pSimPart->GetStringAttribute(ATTRIBUTE_PROJECT_NAME).GetUTF8Text());
        std::string attrStatus(pSimPart->GetStringAttribute(ATTRIBUTE_PROJECT_STATUS).GetUTF8Text());

        // Both attributes must be present for the part to qualify.
        if (!attrName.empty() && !attrStatus.empty())
        {
            if (attrStatus == ATTRIBUTE_PROJECT_STATUS_DEFINED)
                return Status::ProjectStatus_Defined;
            if (attrStatus == ATTRIBUTE_PROJECT_STATUS_RESPONSE_SOLVED)
                return Status::ProjectStatus_ResponseSolved;
            if (attrStatus == ATTRIBUTE_PROJECT_STATUS_RESPONSE_NOISE_SOLVED)
                return Status::ProjectStatus_ResponseNoiseSolved;
            if (attrStatus == ATTRIBUTE_PROJECT_STATUS_NOISE_SOLVED)
                return Status::ProjectStatus_NoiseSolved;
        }
    }

    // Not a sim part, attributes missing, or unknown status string.
    throw NXException::Create(MSGTXT("The part is not a vsar project part."));
}
// Attach the session's project singleton to an already-opened project part
// and reload its solved results (op2, afu, noise).
// NOTE(review): the catch swallows all exceptions, so non-project parts —
// and genuine failures — pass through silently by design; verify callers
// do not need the error.
void Project::Open(BasePart* pOpenedPrt)
{
    try
    {
        // Throws for non-project parts, which exits via the silent catch.
        ProjectType prjType(GetProjectTypeOfPart(pOpenedPrt));

        if (m_prjInstance.get())
        {
            UI::GetUI()->NXMessageBox()->Show("Open Project", NXMessageBox::DialogTypeError,
                MSGTXT("Failed to open vsdane project. Only one project could exist in the session."));
            // TODO: Close the part
            //pOpenedPrt->Close(BasePart::CloseWholeTreeTrue, BasePart::CloseModifiedCloseModified, NULL);
            throw NXException::Create(MSGTXT("Failed to open vsdane project. Only one project could exist in the session."));
        }

        m_prjInstance.reset(new Project(dynamic_cast<CAE::SimPart*>(pOpenedPrt), prjType));
        m_prjStatus.Switch(GetProjectStatusOfPart(pOpenedPrt));

        // Load result
        BaseProjectProperty *pPrjProp = m_prjInstance->GetProperty();

        // Load response result
        filesystem::path resultPathName(filesystem::path(pPrjProp->GetProjectPath()) /
            pPrjProp->GetResponseOp2ResultName());
        LoadResult(resultPathName.string());

        resultPathName = filesystem::path(pPrjProp->GetProjectPath()) /
            pPrjProp->GetResponseAfuResultName();
        LoadResult(resultPathName.string());

        // Load noise
        resultPathName = filesystem::path(pPrjProp->GetProjectPath()) /
            pPrjProp->GetNoiseResultName();
        LoadResult(resultPathName.string());
    }
    catch (std::exception &)
    {
        // Intentionally ignored: opening a non-project part is a no-op.
    }
}
// Detach the project singleton when its owning part is closed.
// Non-project parts make GetProjectTypeOfPart throw, which the empty
// catch turns into a no-op.
void Project::Close(BasePart* pOpenedPrt)
{
    try
    {
        ProjectType prjType(GetProjectTypeOfPart(pOpenedPrt));

        // Only reset if the closing part is the one backing the project.
        if (m_prjInstance.get() &&
            m_prjInstance->GetProperty()->GetSimPart() == pOpenedPrt)
        {
            m_prjInstance.reset();
            m_prjStatus.Switch(Status::ProjectStatus_None);
        }
    }
    catch (std::exception &)
    {
        // Intentionally ignored: closing a non-project part is a no-op.
    }
}
// Access the session-wide project singleton; NULL when no project is open.
Project* Project::Instance()
{
    return m_prjInstance.get();
}
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Build a brand-new project: select the property implementation for the
// requested type, then copy templates and initialize the sim part.
// NOTE(review): an unrecognized prjType leaves m_prjProperty null and
// Initilize would dereference it — callers must pass a valid type.
Project::Project(const std::string &prjName, const std::string &prjPath, ProjectType prjType)
{
    switch (prjType)
    {
    case ProjectType_Bridge:
        m_prjProperty.reset(new BridgeProperty());
        break;
    case ProjectType_Selmi_Infinite:
        m_prjProperty.reset(new SelmiInfiniteBaseProperty());
        break;
    case ProjectType_Tunnel:
        m_prjProperty.reset(new TunnelProperty());
        break;
    default:
        break;
    }
    Initilize(prjName, prjPath);
}
// Re-attach to an existing project part (no template copy / initialization):
// only the type-specific property wrapper is created around the sim part.
Project::Project(CAE::SimPart* pSimPrt, ProjectType prjType)
{
    switch (prjType)
    {
    case ProjectType_Bridge:
        m_prjProperty.reset(new BridgeProperty(pSimPrt));
        break;
    case ProjectType_Selmi_Infinite:
        m_prjProperty.reset(new SelmiInfiniteBaseProperty(pSimPrt));
        break;
    case ProjectType_Tunnel:
        m_prjProperty.reset(new TunnelProperty(pSimPrt));
        break;
    default:
        break;
    }
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
Project::~Project()
{
}

// New-project setup: copy template parts, open/save-as the sim part, and
// stamp the project attributes on it.
// NOTE: "Initilize" is a misspelling of "Initialize", kept because it is
// the declared interface.
void Project::Initilize(const std::string &prjName, const std::string &prjPath)
{
    LoadPrtPart(prjPath);
    LoadSimPart(prjName, prjPath);
    SetProjectAttribute(prjName);
}
// Copy the common template folder plus the type-specific template folder
// from the installation directory into the new project directory.
void Project::LoadPrtPart(const std::string &prjPath)
{
    namespace bfs = boost::filesystem;

    // Copy part files to location
    const bfs::path templatePath(bfs::path(GetInstallPath()) / TEMPLATE_FOLDER_NAME);

    //ListingWindow *pLstWnd = Session::GetSession()->ListingWindow();
    //pLstWnd->Open();
    CopyDirectory(bfs::path(templatePath / TEMPLATE_COMMON_FOLDER_NAME).string(), prjPath);
    CopyDirectory(bfs::path(templatePath / m_prjProperty->GetTemplateBaseName()).string(), prjPath);

    // copy Idealized Part
    //const std::string idealPrtName(m_prjProperty->GetTemplateBaseName().append("_f_i.prt"));
    //fromFilePathName = templatePath / m_prjProperty->GetTemplateBaseName() / idealPrtName;
    //toFilePathName = toPath / idealPrtName;
    //if (bfs::exists(toFilePathName))
    //    bfs::remove_all(toFilePathName);
    //bfs::copy_file(fromFilePathName, toFilePathName);
}
// Open the copied template sim part, save it (and its afem part) under the
// project name, delete the now-redundant template copies, and remember the
// sim part in the project property.
// The statement order matters: the fem part must be saved before the sim
// part so the sim references the renamed afem file.
void Project::LoadSimPart(const std::string &prjName, const std::string &prjPathName)
{
    // Open sim and fem, save as to location
    Session *pSession = Session::GetSession();
    PartCollection *pPrtCol = pSession->Parts();
    CAE::SimPart *pSimPrt = NULL;
    CAE::CaePart *pFemPrt = NULL;

    namespace bfs = boost::filesystem;

    // Get template sim path name
    const bfs::path prjPath(prjPathName);
    bfs::path tempPrtPathName(prjPath / m_prjProperty->GetTemplateSimName());

    PartLoadStatus *pPrtLoadStatus = NULL;

    // Open template sim part
    {
        DELETE_CLASS_POINTER(pPrtLoadStatus);
        pSimPrt = dynamic_cast<CAE::SimPart*>(pPrtCol->OpenBaseDisplay(tempPrtPathName.string().c_str(),
            &pPrtLoadStatus));
    }

    // Save template sim parts to project folder
    try
    {
        bfs::path toFilePathName;

        pFemPrt = pSimPrt->FemPart();

        // save as afem part
        toFilePathName = prjPath / std::string(prjName).append("_a.afm");
        SaveAsComp(pFemPrt, toFilePathName);

        // save as sim file
        toFilePathName = prjPath / std::string(prjName).append("_s.sim");
        SaveAsComp(pSimPrt, toFilePathName);

        const std::string tempBaseName(m_prjProperty->GetTemplateBaseName());

        // delete original afm part
        std::string tempSimName(std::string(tempBaseName).append("_s.sim"));
        std::string tempAfmName(std::string(tempBaseName).append("_a.afm"));

        boost::filesystem::remove_all(prjPath / tempSimName);
        boost::filesystem::remove_all(prjPath / tempAfmName);
    }
    catch(std::exception &)
    {
        // On any failure close the opened part tree before rethrowing so
        // the session is not left holding the template part.
        pSimPrt->Close(BasePart::CloseWholeTreeTrue, BasePart::CloseModifiedCloseModified, NULL);
        throw;
    }

    m_prjProperty->SetSimPart(pSimPrt);
}
// Save a part under a new path, removing any file already at the target.
// Fix: the PartSaveStatus returned by SaveAs was leaked — the original
// invoked DELETE_CLASS_POINTER only *before* the call (on a NULL pointer).
// It is now freed after the save completes.
void Project::SaveAsComp( NXOpen::BasePart * pPrt,
    const boost::filesystem::path &toFilePathName)
{
    PartSaveStatus * pPrtSaveStatus = NULL;

    // remove file in the target path
    if (boost::filesystem::exists(toFilePathName))
        boost::filesystem::remove_all(toFilePathName);

    pPrtSaveStatus = pPrt->SaveAs(toFilePathName.string().c_str());

    // Release the returned status object (previously leaked).
    DELETE_CLASS_POINTER(pPrtSaveStatus);
}
// Stamp the project name, type and initial "defined" status onto the sim
// part as string attributes, run a deferred update, and save the part.
// Fix: on update failure the original called NXException::Create(...) but
// discarded the result — the exception was never thrown. It now throws,
// matching the error-handling style used elsewhere in this file.
void Project::SetProjectAttribute(const std::string &prjName)
{
    CAE::SimPart *pSimPrt = GetProperty()->GetSimPart();
    Session *theSession = Session::GetSession();

    // Bundle all attribute writes under one invisible undo mark.
    Session::UndoMarkId markId;
    markId = theSession->SetUndoMark(Session::MarkVisibilityInvisible, "Set Project Attribute");

    pSimPrt->SetAttribute(ATTRIBUTE_PROJECT_NAME, prjName.c_str(), Update::OptionLater);
    pSimPrt->SetAttribute(ATTRIBUTE_PROJECT_TYPE, m_prjProperty->GetProjectTypeName().c_str(), Update::OptionLater);
    pSimPrt->SetAttribute(ATTRIBUTE_PROJECT_STATUS, ATTRIBUTE_PROJECT_STATUS_DEFINED, Update::OptionLater);

    Update *pUpdateMgr = theSession->UpdateManager();
    if (pUpdateMgr->DoUpdate(markId) != 0)
    {
        // Was missing the throw: the created exception was silently dropped.
        throw NXException::Create(pUpdateMgr->ErrorList()->GetErrorInfo(0)->ErrorCode());
    }

    pSimPrt->Save(BasePart::SaveComponentsTrue, BasePart::CloseAfterSaveFalse);
}
//void Project::LoadFromTemplate(const std::string &prjPath)
//{
// int rtc = 0;
// UF_PART_load_status_t loadStatus = {0};
// UF_CLONE_initialise(UF_CLONE_clone_operation);
// std::string strAssemName(GetInstallPath().append("template"));
// strAssemName.push_back(PATH_DELIM);
// strAssemName.append("rail-transit.prt");
// rtc = UF_CALL(UF_CLONE_add_assembly(strAssemName.c_str(), &loadStatus));
// rtc = UF_CALL(UF_CLONE_set_def_action(UF_CLONE_clone));
// rtc = UF_CALL(UF_CLONE_set_def_directory(prjPath.c_str()));
// rtc = UF_CALL(UF_CLONE_set_def_naming(UF_CLONE_naming_rule));
// std::string outputPartName(m_prjName + "_");//(prjPath);
// //if (*prjPath.rbegin() != PATH_DELIM)
// // outputPartName.push_back(PATH_DELIM);
// //outputPartName.append(m_prjName).append("_main.prt");
// //rtc = UF_CALL(UF_CLONE_set_naming(strAssemName, UF_CLONE_user_name, outputPartName.c_str()));
// UF_CLONE_name_rule_def_t nameRule = {UF_CLONE_prepend_string, NULL,
// const_cast<char*>(outputPartName.c_str())};
// UF_CLONE_naming_failures_t nameFailures = {0};
// rtc = UF_CALL(UF_CLONE_init_naming_failures(&nameFailures));
// rtc = UF_CALL(UF_CLONE_set_name_rule(&nameRule, &nameFailures));
// rtc = UF_CALL(UF_CLONE_perform_clone(&nameFailures));
// BOOST_SCOPE_EXIT((&loadStatus)(&nameFailures))
// {
// UF_PART_free_load_status(&loadStatus);
// if (nameFailures.n_failures > 0)
// {
// UF_free(nameFailures.statuses);
// UF_free_string_array(nameFailures.n_failures, nameFailures.input_names);
// UF_free_string_array(nameFailures.n_failures, nameFailures.output_names);
// }
// UF_CLONE_terminate();
// }
// BOOST_SCOPE_EXIT_END
//}
}
<file_sep>/src/NXVsar/src/vsar/Vsar_Result.cxx
#include <uf_defs.h>
#include <Vsar_Result.hxx>
#include <algorithm>
#include <fstream>
#include <regex>
#include <boost/filesystem.hpp>
#include <boost/scope_exit.hpp>
#include <boost/lambda/lambda.hpp>
#include <boost/lambda/bind.hpp>
//#include <boost/function.hpp>
//#include <boost/bind.hpp>
#include <boost/tuple/tuple.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/format.hpp>
#include <boost/foreach.hpp>
#include <uf_ui.h>
#include <NXOpen/ugmath.hxx>
#include <NXOpen/Session.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/Point.hxx>
#include <NXOpen/CAE_AfuManager.hxx>
#include <NXOpen/CAE_AfuData.hxx>
#include <NXOpen/CAE_AfuDataConvertor.hxx>
#include <NXOpen/CAE_FTK_DataManager.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Project.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_SolveOperation.hxx>
using namespace boost;
using namespace boost::lambda;
using namespace NXOpen;
using namespace NXOpen::CAE;
using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
BaseResult::~BaseResult()
{
}

// True when the result file produced by a previous solve is on disk.
bool BaseResult::IsResultExist() const
{
    return filesystem::exists(GetResultPathName());
}
// Load the result file into the NX FTK data manager.
// Returns true only when the file exists AND LoadFile completed without
// throwing. Fix: the original set success = true after the try/catch, so a
// throwing LoadFile (swallowed by the catch) still reported success.
bool BaseResult::Load() const
{
    bool success = false;

    if (IsResultExist())
    {
        try
        {
            FTK::DataManager *pDataMgr = Session::GetSession()->DataManager();

            pDataMgr->LoadFile(GetResultPathName().c_str());

            // Only reached if LoadFile did not throw.
            success = true;
        }
        catch (std::exception &)
        {
            // Load failed; success remains false.
        }
    }

    return success;
}
// Full path of the response .op2 result inside the project directory.
std::string ResponseOp2Result::GetResultPathName() const
{
    // Get result path name
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();

    return (filesystem::path(pPrjProp->GetProjectPath()) /
        pPrjProp->GetResponseOp2ResultName()).string();
}
// Create the AFU result: make the file, register it with the data manager,
// then let the subclass fill in its records.
void BaseAfuResult::Create()
{
    CreateResultFile();
    Session::GetSession()->DataManager()->LoadFile(GetResultPathName());
    CreateRecords();
}

// Create an empty AFU file at the result path.
void BaseAfuResult::CreateResultFile()
{
    Session::GetSession()->AfuManager()->CreateNewAfuFile(GetResultPathName().c_str());
}
#if 0
static ResponseRecordItem s_responseRecordItems[] =
{
{"Beam-time-acceleration", "beam_out.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Vehicle-time-acceleration", "vehicle_out.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Rail-Midspan-time-acceleration", "rail_out.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Slab-time-acceleration", "fslab_out.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Sometime-Rail-time-acceleration", "rail_timeout.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Sometime-Beam-time-acceleration", "beam_timeout.dat", 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
{"Sometime-Rail-displacement", "rail_timeout.dat", 0, 1, 4, XyFunctionUnitTimeSec, XyFunctionUnitDisplacementM },
{"Sometime-Beam-displacement", "beam_timeout.dat", 0, 1, 4, XyFunctionUnitTimeSec, XyFunctionUnitDisplacementM },
{"Rail-Midspan-displacement", "rail_out.dat", 0, 1, 4, XyFunctionUnitTimeSec, XyFunctionUnitDisplacementM },
{"Slab-time-displacement", "fslab_out.dat", 0, 1, 4, XyFunctionUnitTimeSec, XyFunctionUnitDisplacementM },
{"Beam-time-displacement", "beam_out.dat", 0, 1, 4, XyFunctionUnitTimeSec, XyFunctionUnitDisplacementM }
};
#endif
// Describes one AFU record extracted from a whitespace-delimited .dat file.
struct ResponseRecordItem
{
    std::string    m_recordName;     // record name written into the AFU file
    std::string    m_srcResultFile;  // source .dat file name
    unsigned int   m_idxColumns[2];  // 0-based x and y column indices
    unsigned int   m_columnCnt;      // total columns per line in the .dat file
    XyFunctionUnit m_xUnit;          // abscissa unit
    XyFunctionUnit m_yUnit;          // ordinate unit
};

// Record table for the vehicle/wheel/turn response results.
// Brace layout relies on aggregate init: {0, 3} fills m_idxColumns,
// the next value fills m_columnCnt.
static ResponseRecordItem s_responseRecordItems[] =
{
    {"Vehicle-Time-Acceleration", VEHICLE_OUTPUT_FILE_NAME, 0, 3, 4, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Wheel-1-Time-Acceleration", WHEEL_OUTPUT_FILE_NAME, 0, 3, 13, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Wheel-2-Time-Acceleration", WHEEL_OUTPUT_FILE_NAME, 0, 6, 13, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Wheel-3-Time-Acceleration", WHEEL_OUTPUT_FILE_NAME, 0, 9, 13, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Wheel-4-Time-Acceleration", WHEEL_OUTPUT_FILE_NAME, 0, 12, 13, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Turn-Front-Time-Acceleration", TURN_OUTPUT_FILE_NAME, 0, 3, 7, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM},
    {"Turn-Rear-Time-Acceleration", TURN_OUTPUT_FILE_NAME, 0, 6, 7, XyFunctionUnitTimeSec, XyFunctionUnitAccelerationM}
};
// Full path of the response AFU result inside the project directory.
std::string ResponseAfuResult::GetResultPathName() const
{
    // Get result path name
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();

    return (filesystem::path(pPrjProp->GetProjectPath()) /
        pPrjProp->GetResponseAfuResultName()).string();
}

// Create one AFU record per entry of s_responseRecordItems
// (boost::lambda bind; _1 is the table entry).
void ResponseAfuResult::CreateRecords()
{
    std::for_each(s_responseRecordItems, s_responseRecordItems + N_ELEMENTS(s_responseRecordItems),
        bind(&ResponseAfuResult::CreateRecord, this, _1));
}
// Build a single time-function AFU record: configure axes/units from the
// table entry, pull the xy columns out of the source .dat file, and write
// the record through the AFU manager.
void ResponseAfuResult::CreateRecord(const ResponseRecordItem &recordItem)
{
    AfuManager *pAfuMgr = Session::GetSession()->AfuManager();

    // scoped_ptr frees the AfuData even if CreateRecord throws.
    boost::scoped_ptr<AfuData> pAfuData(pAfuMgr->CreateAfuData());

    pAfuData->SetFileName(GetResultPathName().c_str());
    pAfuData->SetRecordName(recordItem.m_recordName);
    pAfuData->SetAxisDefinition(AfuData::AbscissaTypeUneven, recordItem.m_xUnit,
        AfuData::OrdinateTypeReal, recordItem.m_yUnit);
    pAfuData->SetFunctionDataType(XyFunctionDataTypeTime);

    // Read xy values from dat file
    std::vector<double> xValues;
    std::vector<double> yValues;

    ReadDataFromDat(recordItem, xValues, yValues);

    pAfuData->SetRealData(xValues, yValues);

    pAfuMgr->CreateRecord(pAfuData.get());
}
// Extract the x/y columns selected by recordItem from its source .dat file
// (whitespace-delimited, m_columnCnt values per row).
// Fixes vs. the original:
//  - the NXException for a missing file was created but never thrown;
//  - the while(good()) loop pushed a stale duplicate of the last row after
//    the final failed read (stream state was checked before, not after);
//  - removed an unused local (pPrjProp).
void ResponseAfuResult::ReadDataFromDat(const ResponseRecordItem &recordItem,
    std::vector<double> &xValues, std::vector<double> &yValues) const
{
    // Get dat path name
    std::string datResultPathName((filesystem::path(m_inputPath) /
        recordItem.m_srcResultFile).string());

    if (!filesystem::exists(datResultPathName))
        throw NXException::Create(MSGTXT("Result file does not exist."));

    std::ifstream ifDatResult(datResultPathName.c_str());

    std::vector<double> srcRecLineItem(recordItem.m_columnCnt);

    for (;;)
    {
        // Read one full row of m_columnCnt values.
        for (unsigned int idxCol = 0; idxCol < recordItem.m_columnCnt; idxCol++)
            ifDatResult >> srcRecLineItem[idxCol];

        // Stop without pushing when the row could not be read completely.
        if (!ifDatResult)
            break;

        xValues.push_back(srcRecLineItem[recordItem.m_idxColumns[0]]);
        yValues.push_back(srcRecLineItem[recordItem.m_idxColumns[1]]);
    }
}
// Abstract base for one "$BLOCKNAME" section of a Nastran punch-style
// result file: Read() locates the header and parses the body; Write()
// emits the parsed data into an AFU file.
class ResultBlock
{
public:
    ResultBlock() : m_label(0)
    {
    }
    virtual ~ResultBlock()
    {
    }

    // Parse one block from the stream; false when no header was found.
    bool Read(std::ifstream &ifStream)
    {
        if (!ReadHead(ifStream))
            return false;

        ReadBody(ifStream);
        return true;
    }

    // Write the parsed block into the given AFU file.
    virtual void Write(const std::string &afuFile) = 0;

    virtual XyFunctionUnit GetXUnit() const;          // abscissa unit (time by default)
    virtual XyFunctionUnit GetYUnit() const = 0;      // ordinate unit
    virtual std::string GetBlockName() const = 0;     // "$<name>" header tag
    virtual std::string GetKeyName() const = 0;       // regex fragment for the id line

protected:
    virtual bool ReadHead(std::ifstream &ifStream);
    virtual void ReadHeadInfo(const std::string &strHeadLine);
    virtual void ReadBody(std::ifstream &ifStream) = 0;

protected:
    int m_label;  // id parsed from the "$KEY = <n>" header line
};
// Default abscissa: time in seconds.
XyFunctionUnit ResultBlock::GetXUnit() const
{
    return XyFunctionUnitTimeSec;
}

// Scan forward for the "$<BlockName>" marker, then consume the following
// "$..." header lines: the "$<KeyName> = <n>" line yields m_label, any
// other header line goes to ReadHeadInfo. The loop breaks (leaving the
// stream positioned at the data) when a non-'$' character is next.
// NOTE(review): if EOF is hit after the marker but before any data, this
// still returns true with whatever header state was parsed — confirm the
// input format guarantees a body follows the marker.
bool ResultBlock::ReadHead(std::ifstream &ifStream)
{
    std::string strRead;
    bool found = false;
    std::string strBlockId(std::string("$").append(GetBlockName()));

    // Matches e.g. "$POINT ID = 123" and captures the number.
    std::tr1::regex reg(std::string("^\\$").append(GetKeyName()).append("\\s*=\\s*(\\d+).*"));

    while(!ifStream.eof())
    {
        if (found) // found
        {
            if (m_label == 0)
            {
                std::getline(ifStream, strRead);

                std::tr1::smatch what;

                if(std::tr1::regex_match(strRead, what, reg) && what.size() == 2)
                {
                    m_label = boost::lexical_cast<int>(what[1]);
                }
            }
            else
            {
                if (ifStream.peek() != '$') // read content
                    break;
                else
                {
                    std::getline(ifStream, strRead);

                    ReadHeadInfo(strRead);
                }
            }
        }
        else // not found
        {
            ifStream >> strRead;

            if (strRead == strBlockId)
            {
                found = true;
            }
            ifStream.ignore(200, '\n'); // ignore rest of the line
        }
    }

    return found;
}

// Hook for subclasses to consume extra "$..." header lines; no-op here.
void ResultBlock::ReadHeadInfo(const std::string &strHeadLine)
{
}
// ResultBlock for per-node data: each time step carries a translation
// (Point3d) and a rotation (Vector3d); Write() emits X/Y/Z records.
class NodeResultBlock : public ResultBlock
{
public:
    // (time, translation, rotation) for one time step.
    typedef boost::tuple<double, Point3d, Vector3d>  NodeDataItem;

public:
    NodeResultBlock();
    virtual ~NodeResultBlock();

    virtual void Write(const std::string &afuFile);
    virtual std::string GetKeyName() const;

protected:
    virtual void ReadBody(std::ifstream &ifStream);

    // Write a single time-series record into the AFU file.
    void WriteRecord(const std::string &afuFile, const std::string &recordName,
        const std::vector<double> &xValues, const std::vector<double> &yValues);

private:
    std::vector<NodeDataItem> m_values;  // parsed rows in file order
};

NodeResultBlock::NodeResultBlock() : m_values()
{
}

NodeResultBlock::~NodeResultBlock()
{
}

// Regex fragment for the node id header line ("$POINT ID = n").
std::string NodeResultBlock::GetKeyName() const
{
    return "POINT\\s*ID";
}
// Parse the data rows of a node block: each time step spans two physical
// lines — "time id X Y Z" (translation) then "id RX RY RZ" (rotation).
// Stops at the next '$' header or EOF.
// Fixes vs. the original:
//  - the id token was read into a fixed char[10] via operator>>, which
//    overflows on longer tokens; std::string is safe for any length;
//  - a row is only stored if both lines were read successfully, so a
//    truncated final row is no longer pushed with stale values;
//  - removed an unused local (strRead).
void NodeResultBlock::ReadBody(std::ifstream &ifStream)
{
    std::string strIdToken;   // node-id token; value itself is unused
    double   time;
    Point3d  coord;
    Vector3d rotate;

    while(!ifStream.eof() && (ifStream.peek() != '$'))
    {
        ifStream >> time >> strIdToken >> coord.X >> coord.Y >> coord.Z;
        ifStream.ignore(200, '\n'); // ignore rest of the line

        ifStream >> strIdToken >> rotate.X >> rotate.Y >> rotate.Z;
        ifStream.ignore(200, '\n'); // ignore rest of the line

        if (!ifStream)   // incomplete row — do not store stale values
            break;

        m_values.push_back(NodeDataItem(time, coord, rotate));
    }
}
// Write one uneven-abscissa, real-ordinate time record into the AFU file,
// using the units supplied by the concrete block subclass.
void NodeResultBlock::WriteRecord(const std::string &afuFile, const std::string &recordName,
    const std::vector<double> &xValues, const std::vector<double> &yValues)
{
    AfuManager *pAfuMgr = Session::GetSession()->AfuManager();

    // scoped_ptr frees the AfuData even if CreateRecord throws.
    boost::scoped_ptr<AfuData> pAfuData(pAfuMgr->CreateAfuData());

    pAfuData->SetFileName(afuFile.c_str());
    pAfuData->SetRecordName(recordName.c_str());
    pAfuData->SetAxisDefinition(AfuData::AbscissaTypeUneven, GetXUnit(),
        AfuData::OrdinateTypeReal, GetYUnit());
    pAfuData->SetFunctionDataType(XyFunctionDataTypeTime);

    pAfuData->SetRealData(xValues, yValues);

    pAfuMgr->CreateRecord(pAfuData.get());
}
// Split the parsed rows into per-axis series and emit three records
// (X, Y, Z translation vs. time); rotations are currently not written.
void NodeResultBlock::Write(const std::string &afuFile)
{
    std::vector<double> times;
    std::vector<double> xCoords;
    std::vector<double> yCoords;
    std::vector<double> zCoords;

    times.reserve(m_values.size());
    xCoords.reserve(m_values.size());
    yCoords.reserve(m_values.size());
    zCoords.reserve(m_values.size());

    BOOST_FOREACH(NodeDataItem dispItem, m_values)
    {
        times.push_back(dispItem.get<0>());
        xCoords.push_back(dispItem.get<1>().X);
        yCoords.push_back(dispItem.get<1>().Y);
        zCoords.push_back(dispItem.get<1>().Z);
    }

    // Partially-applied pattern: block name and node label are bound here,
    // the axis letter is substituted per record below.
    boost::format recordName(boost::format(RESPONSE_RESULT_RECORD_PATTERN_NAME) % GetBlockName() % m_label);

    WriteRecord(afuFile, (boost::format(recordName) % "X").str(), times, xCoords);
    WriteRecord(afuFile, (boost::format(recordName) % "Y").str(), times, yCoords);
    WriteRecord(afuFile, (boost::format(recordName) % "Z").str(), times, zCoords);
}
// Node block for the "$DISPLACEMENTS" section (ordinate in millimetres).
class DisplacementBlock : public NodeResultBlock
{
public:
    DisplacementBlock();
    virtual ~DisplacementBlock();

    virtual XyFunctionUnit GetYUnit() const;
    virtual std::string GetBlockName() const;
};

DisplacementBlock::DisplacementBlock() : NodeResultBlock()
{
}

DisplacementBlock::~DisplacementBlock()
{
}

XyFunctionUnit DisplacementBlock::GetYUnit() const
{
    return XyFunctionUnitDisplacementMm;
}

std::string DisplacementBlock::GetBlockName() const
{
    return "DISPLACEMENTS";
}
// Node block for the "$ACCELERATION" section (ordinate in mm/s^2).
class AccelerationBlock : public NodeResultBlock
{
public:
    AccelerationBlock();
    virtual ~AccelerationBlock();

    virtual XyFunctionUnit GetYUnit() const;
    virtual std::string GetBlockName() const;
};

AccelerationBlock::AccelerationBlock() : NodeResultBlock()
{
}

AccelerationBlock::~AccelerationBlock()
{
}

XyFunctionUnit AccelerationBlock::GetYUnit() const
{
    return XyFunctionUnitAccelerationMm;
}

std::string AccelerationBlock::GetBlockName() const
{
    return "ACCELERATION";
}
// Node block for the "$VELOCITY" section (ordinate in mm/s).
class VelocityBlock : public NodeResultBlock
{
public:
    VelocityBlock();
    virtual ~VelocityBlock();

    virtual XyFunctionUnit GetYUnit() const;
    virtual std::string GetBlockName() const;
};

VelocityBlock::VelocityBlock() : NodeResultBlock()
{
}

VelocityBlock::~VelocityBlock()
{
}

XyFunctionUnit VelocityBlock::GetYUnit() const
{
    return XyFunctionUnitVelocityMm;
}

std::string VelocityBlock::GetBlockName() const
{
    return "VELOCITY";
}
// Full path of the Nastran punch result for the project's solution.
std::string NastranResult::GetNastranResultPathName() const
{
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();

    std::string strSolverResultName((boost::format(NASTRAN_PCH_RESULT_FILE_PATTERN_NAME) %
        pPrjProp->GetProjectName() % VSDANE_SOLUTION_NAME).str());

    return ((filesystem::path(pPrjProp->GetProjectPath()) / strSolverResultName).string());
}

// Rewind the stream and collect every block of the given type from the
// file. Each iteration constructs a fresh BlockType and keeps it only if a
// matching header was found before EOF.
template<typename BlockType>
StlResultBlockVector NastranResult::ReadDataBlock(std::ifstream &ifStream)
{
    StlResultBlockVector vResultBlock;

    // Handle displacement result
    ifStream.clear();
    ifStream.seekg(0, std::ios::beg); // goto file head

    while (!ifStream.eof())
    {
        boost::shared_ptr<ResultBlock> pResultBlock(new BlockType());

        if (pResultBlock->Read(ifStream))
            vResultBlock.push_back(pResultBlock);
    }

    return vResultBlock;
}
// Parse the Nastran punch file and write every extracted block into the
// AFU result. The BOOST_SCOPE_EXIT guard runs on every exit path and
// deletes the AFU file again if no record ended up inside it.
void NastranResult::CreateRecords()
{
    std::string resultName(GetResultPathName());

    // remove work dir
    BOOST_SCOPE_EXIT((&resultName))
    {
        AfuManager *pAfuMgr = Session::GetSession()->AfuManager();

        // DELETE AFU
        if (pAfuMgr->GetRecordIndexes(resultName.c_str()).empty())
            pAfuMgr->DeleteAfuFile(resultName.c_str());
    }
    BOOST_SCOPE_EXIT_END

    std::string nastranResultName(GetNastranResultPathName());

    if (!filesystem::exists(nastranResultName))
        throw NXException::Create("No solve result exists.");

    StlResultBlockVector vAllResultBlock;
    std::ifstream solverResult(nastranResultName.c_str());

    // Subclass decides which block types to extract.
    vAllResultBlock = ExtractContent(solverResult);

    if (!vAllResultBlock.empty())
    {
        BOOST_FOREACH(StlResultBlockVector::value_type pResultBlock, vAllResultBlock)
        {
            pResultBlock->Write(resultName);
        }
    }
    else
        throw NXException::Create("No output result of specified type exists.");
}
#if 0
std::string ResponseResult::GetResultPathName() const
{
// Get result path name
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
return (filesystem::path(pPrjProp->GetProjectPath()) /
pPrjProp->GetResponseResultName()).string();
}
StlResultBlockVector ResponseResult::ExtractContent(std::ifstream &solverResult)
{
StlResultBlockVector vResultBlock;
StlResultBlockVector vAllResultBlock;
vResultBlock = ReadDataBlock<DisplacementBlock>(solverResult);
vAllResultBlock.insert(vAllResultBlock.end(), vResultBlock.begin(), vResultBlock.end());
vResultBlock = ReadDataBlock<AccelerationBlock>(solverResult);
vAllResultBlock.insert(vAllResultBlock.end(), vResultBlock.begin(), vResultBlock.end());
// element stress block
return vAllResultBlock;
}
#endif
//////////////////////////////////////////////////////////////////////////
//  NoiseIntermResult
// Intermediate noise result: velocity blocks extracted from the solver
// output, stored under the project's intermediate-result name.
std::string NoiseIntermResult::GetResultPathName() const
{
    // Get result path name
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();

    return (filesystem::path(pPrjProp->GetProjectPath()) /
        pPrjProp->GetNoiseIntermediateResultName()).string();
}

// Only velocity blocks contribute to the intermediate noise result.
StlResultBlockVector NoiseIntermResult::ExtractContent(std::ifstream &solverResult)
{
    return ReadDataBlock<VelocityBlock>(solverResult);
}
//////////////////////////////////////////////////////////////////////////
//  NoiseResult
// Full path of the final noise AFU result in the project directory.
std::string NoiseResult::GetResultPathName() const
{
    // Get result path name
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();

    return (filesystem::path(pPrjProp->GetProjectPath()) /
        pPrjProp->GetNoiseResultName()).string();
}
// Emit one time-domain and one frequency-domain record per output point.
// The BOOST_SCOPE_EXIT guard deletes the AFU file again if nothing was
// actually written (e.g. an output file was missing and a record threw).
void NoiseResult::CreateRecords()
{
    std::string resultName(GetResultPathName());

    // remove work dir
    BOOST_SCOPE_EXIT((&resultName))
    {
        AfuManager *pAfuMgr = Session::GetSession()->AfuManager();

        // DELETE AFU
        if (pAfuMgr->GetRecordIndexes(resultName.c_str()).empty())
            pAfuMgr->DeleteAfuFile(resultName.c_str());
    }
    BOOST_SCOPE_EXIT_END

    for (unsigned int idx = 0; idx < m_outputPoints.size(); idx++)
    {
        OutputTimeRecord(idx);
        OutputFreqRecord(idx);
    }
}
// Write the time/sound-pressure record for output point idxRecord: the
// source file name is 1-based, the record name embeds the point's coords.
void NoiseResult::OutputTimeRecord(int idxRecord)
{
    // output time-sound pressure
    Point3d coord(m_outputPoints[idxRecord]->Coordinates());

    std::string noiseOutputName;
    std::string recordName;

    noiseOutputName = (boost::format(NOISE_TIME_OUTPUT_FILE_NAME) % (idxRecord+1)).str();
    recordName = (boost::format(NOISE_RESULT_TIME_RECORD_PATTERN_NAME) %
        coord.X % coord.Y % coord.Z).str();

    std::vector<double> xValues;
    std::vector<double> yValues;

    // Read xy values from dat file
    ReadDataFromDat(noiseOutputName, xValues, yValues);

    WriteRecord(recordName, XyFunctionDataTypeTime,
        XyFunctionUnitTimeSec, XyFunctionUnitUnknown, xValues, yValues);
}
// Write the frequency/sound-pressure record for output point idxRecord,
// reading the pre-computed spectrum from the solver's frequency file.
void NoiseResult::OutputFreqRecord(int idxRecord)
{
    // output frequency-sound pressure
    Point3d coord(m_outputPoints[idxRecord]->Coordinates());

    std::string noiseOutputName;
    std::string recordName;

    noiseOutputName = (boost::format(NOISE_FREQ_OUTPUT_FILE_NAME) % (idxRecord+1)).str();
    recordName = (boost::format(NOISE_RESULT_FREQ_RECORD_PATTERN_NAME) %
        coord.X % coord.Y % coord.Z).str();

    std::vector<double> xValues;
    std::vector<double> yValues;

    // Read xy values from dat file
    ReadDataFromDat(noiseOutputName, xValues, yValues);

    WriteRecord(recordName, XyFunctionDataTypeGeneral,
        XyFunctionUnitFrequencyHz, XyFunctionUnitUnknown, xValues, yValues);

#if 0
    // FFT Time-Sound data
    AfuManager *pAfuMgr = Session::GetSession()->AfuManager();
    AfuDataConvertor *pAfuConvert = pAfuMgr->AfuDataConvertor();

    std::vector<double> freqVals, yReals, yImags;

    yImags = pAfuConvert->GetFftFrequencyData(xValues, yValues, freqVals, yReals);

    WriteRecord(recordName, XyFunctionDataTypeGeneral,
        XyFunctionUnitFrequencyHz, XyFunctionUnitUnknown, freqVals, yReals, yImags);
#endif
}
// The structs below mirror NX's internal FTK AFU record layout so that
// WriteRecord can patch axis/unit labels through AfuData::GetHandle().
// NOTE(review): this is a layout assumption about an undocumented internal
// structure — it must match the NX version in use; confirm on upgrades.

/* even data setting */
typedef struct FTK_AFU_even_data_s
{
    double   x_min;      /* minimum value */
    double   x_incre;    /* increment value */
    int      x_nums;     /* # of points */
}FTK_AFU_even_data_t, *FTK_AFU_even_data_p_t;

/* data's unit type */
typedef struct FTK_AFU_unit_type_s
{
    int     x_type;     /* see JA_xy_function_measure */
    int     x_unit;     /* see JA_xy_function_unit */
    int     y_type;     /* see JA_xy_function_measure */
    int     y_unit;     /* see JA_xy_function_unit */
    int     frf_type;   /* see JA_xy_function_measure */
    int     frf_unit;   /* see JA_xy_function_unit */
}FTK_AFU_unit_type_t, *FTK_AFU_unit_type_p_t;

/* AFU record header info - ID */
typedef struct FTK_AFU_record_id_s
{
    char        reference[4+1];
    int         reference_id;
    char        response[4+1];
    int         response_id;
    int         load_case;
    int         version;
    int         coordinate;
    char        owner[16+1];
    char        id_line1[80+1];
    char        id_line2[80+1];
    char        id_line3[80+1];
    char        id_line4[80+1];
}FTK_AFU_record_id_t, *FTK_AFU_record_id_p_t;

/* AFU record header info - abscissa */
typedef struct FTK_AFU_record_abs_s
{
    int     spacing;    /* 0 - Even, 1 - Uneven, 2 - Sequence */
    char    axis_label[20+1];
    char    unit_label[20+1];
}FTK_AFU_record_abs_t, *FTK_AFU_record_abs_p_t;

/* AFU record header info - ordinate */
typedef struct FTK_AFU_record_ord_s
{
    int     data_format;    /* 0 - Real, 1 - Complex(real/imaginary), 2 - Complex(magnitude/phase) */
    double  real_offset;
    double  imag_offset;
    double  real_scale;
    double  imag_scale;
    double  damping;
    char    axis_label[20+1];
    char    unit_label[20+1];
}FTK_AFU_record_ord_t, *FTK_AFU_record_ord_p_t;

/* record data points */
typedef struct FTK_AFU_record_data_point_s
{
    int     n_vals;      /* # of points */
    float * x_vals;      /* x values */
    float * y_vals;      /* y values, Real values or Magnitude values for complex data */
    float * c_vals;      /* Imaginary values or Phase values for complex data */
}FTK_AFU_record_data_point_t, *FTK_AFU_record_data_point_p_t;

typedef struct FTK_AFU_table_func_s
{
    char                        afu_name[1025+1];
    char                        record_name[40+1];
    int                         function_type;
    FTK_AFU_even_data_t         even_data;
    FTK_AFU_unit_type_t         unit_data;
    FTK_AFU_record_id_t         id_data;
    FTK_AFU_record_abs_t        x_data;
    FTK_AFU_record_ord_t        y_data;
    /* record numerical data */
    FTK_AFU_record_data_point_t data_point;
}JA_AFU_DATA_t, *JA_AFU_DATA_p_t;
// Write one real-valued noise record, then patch the ordinate axis/unit
// labels by casting AfuData's opaque handle to the mirrored internal
// layout (no public API exists for the labels).
// Fix: the user-facing unit label misspelled "decibel" as "decibal".
// NOTE(review): the strcpy targets are fixed 20+1 char fields in the
// mirrored struct; both labels fit, but keep them under 20 characters.
void NoiseResult::WriteRecord(const std::string &recordName, XyFunctionDataType funcType,
    XyFunctionUnit xUnit, XyFunctionUnit yUnit,
    const std::vector<double> &xValues, const std::vector<double> &yValues)
{
    AfuManager *pAfuMgr = Session::GetSession()->AfuManager();

    boost::scoped_ptr<AfuData> pAfuData(pAfuMgr->CreateAfuData());

    pAfuData->SetFileName(GetResultPathName().c_str());
    pAfuData->SetRecordName(recordName.c_str());
    pAfuData->SetAxisDefinition(AfuData::AbscissaTypeUneven, xUnit,
        AfuData::OrdinateTypeReal, yUnit);
    pAfuData->SetFunctionDataType(funcType);

    pAfuData->SetRealData(xValues, yValues);

    // Hack adding labels
    // The following API is not available
    // Modify Y Axis Label to "Sound Pressure"
    // Modify Y Unit Label to "decibel"
    std::string axisLabel("Sound Pressure");
    std::string unitLabel("decibel");

    JA_AFU_DATA_p_t pJaAfuData = (JA_AFU_DATA_p_t)(pAfuData->GetHandle());

    strcpy(pJaAfuData->y_data.axis_label, axisLabel.c_str());
    strcpy(pJaAfuData->y_data.unit_label, unitLabel.c_str());

    pAfuMgr->CreateRecord(pAfuData.get());
}
#if 0
    // Disabled complex-valued (real + imaginary) variant of WriteRecord.
    // Kept for reference only; not compiled.  TODO(review): remove if the
    // complex ordinate output is never revived.
    void NoiseResult::WriteRecord(const std::string &recordName, XyFunctionDataType funcType,
        XyFunctionUnit xUnit, XyFunctionUnit yUnit,
        const std::vector<double> &xValues,
        const std::vector<double> &yRealValues, const std::vector<double> &yImagValues)
    {
        AfuManager *pAfuMgr = Session::GetSession()->AfuManager();
        boost::scoped_ptr<AfuData> pAfuData(pAfuMgr->CreateAfuData());
        pAfuData->SetFileName(GetResultPathName().c_str());
        pAfuData->SetRecordName(recordName.c_str());
        pAfuData->SetAxisDefinition(AfuData::AbscissaTypeUneven, xUnit,
            AfuData::OrdinateTypeRealImaginary, yUnit);
        pAfuData->SetFunctionDataType(funcType);
        pAfuData->SetComplexData(xValues, yRealValues, yImagValues);
        pAfuMgr->CreateRecord(pAfuData.get());
    }
#endif
void NoiseResult::ReadDataFromDat(const std::string &noiseOutputName,
std::vector<double> &xValues, std::vector<double> &yValues) const
{
// Get dat path name
std::string datResultPathName((m_srcDir / noiseOutputName).string());
if (!filesystem::exists(datResultPathName))
throw NXException::Create((boost::format(MSGTXT("Result file %1% does not exist.")) % noiseOutputName).str().c_str());
std::ifstream ifDatResult(datResultPathName.c_str());
// omit first line
std::string strLine;
std::getline(ifDatResult, strLine);
double xVal = 0.0;
double yVal = 0.0;
while (!ifDatResult.eof())
{
ifDatResult >> xVal >> yVal;
xValues.push_back(xVal);
yValues.push_back(yVal);
}
}
void ResultsLoader::operator() ()
{
bool respResultLoaded = false;
bool noiseResultLoaded = false;
ResponseOp2Result respResult;
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
ResponseAfuResult respAfuResult(pPrjProp->GetProjectPath());
respResultLoaded = respResult.Load() && respAfuResult.Load();
NoiseResult noiseResult(filesystem::path(""), std::vector<Point*>()); // for result query only
noiseResultLoaded = noiseResult.Load();
std::string strRespStatus = (respResultLoaded) ?
"Successfully loaded response result. " :
"Failed to load response result, please solve first. ";
std::string strNoiseStatus = (noiseResultLoaded) ?
"Successfully loaded noise result." :
"Failed to load noise result, please solve first.";
UF_UI_set_status(const_cast<char*>((strRespStatus + strNoiseStatus).c_str()));
}
}
<file_sep>/src/NXVsar/include/VsarUI_TrainSettings.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\TrainSettings.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 04-08-2011 (Format: mm-dd-yyyy)
// Time: 21:59
//
//==============================================================================
#ifndef VSARUI_TRAINSETTINGS_H_INCLUDED
#define VSARUI_TRAINSETTINGS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
namespace VsarUI
{
    /// Block Styler dialog for editing the train parameters (carriage, bogie
    /// and wheel-set properties).  Generated skeleton; see the matching
    /// TrainSettings.cpp for the callback implementations.
    class TrainSettings : public BaseCompDialog
    {
        // class members
    public:
        TrainSettings();
        ~TrainSettings();
        static void ShowDialog();
        //----------------------- BlockStyler Callback Prototypes ---------------------
        // The following member function prototypes define the callbacks
        // specified in your BlockStyler dialog. The empty implementation
        // of these prototypes is provided in the TrainSettings.cpp file.
        // You are REQUIRED to write the implementation for these functions.
        //------------------------------------------------------------------------------
        virtual void InitializeCb();
        virtual void DialogShownCb();
        //virtual int ApplyCb();
        virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
    private:
        // Carriage group
        //NXOpen::BlockStyler::UIBlock* m_grpCarriage;// Block type: Group
        NXOpen::BlockStyler::UIBlock* m_carriageCount;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_carriageLength;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_carriageDistance;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_carriageWeight;// Block type: Expression
        // bogie group
        //NXOpen::BlockStyler::UIBlock* m_grpBogie;// Block type: Group
        NXOpen::BlockStyler::UIBlock* m_bogieLength;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_halfBogieDistance;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_bogieWeight;// Block type: Expression
        // wheel set group
        //NXOpen::BlockStyler::UIBlock* m_grpWheelSet;// Block type: Group
        NXOpen::BlockStyler::UIBlock* m_wheelSetInterval;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetWeight;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetSglStgSusp;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetSglStgSuspDamp;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetTwoStgSuspStiff;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetTwoStgSuspDamp;// Block type: Expression
        NXOpen::BlockStyler::UIBlock* m_wheelSetContactCoefElast;// Block type: Expression
    };
}
#endif //VSARUI_TRAINSETTINGS_H_INCLUDED
<file_sep>/src/NXVsar/src/vsar/Vsar_Slab.cxx
#include <uf_defs.h>
#include <Vsar_Slab.hxx>
//#include <boost/cast.hpp>
#include <NXOpen/Expression.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace NXOpen;
using namespace boost;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
    // Maps each slab UI attribute id to the part and expression that store
    // its value.  Entry 0 (the support count) is read back by
    // Slab::GetSupportCount().
    static CompAttrInfo attrExpInfo[] =
    {
        {SLAB_SUPPORT_COUNT_ID_NAME,   SLAB_PRT_PART_NAME,      SLAB_SUPPORT_COUNT_EXP_NAME},
        {FASTENER_STIFFNESS_ID_NAME,   RAIL_SLAB_FEM_PART_NAME, SLAB_FASTENER_STIFFNESS_EXP_NAME},
        {FASTENER_DAMPING_ID_NAME,     RAIL_SLAB_FEM_PART_NAME, SLAB_FASTENER_DAMPING_EXP_NAME},
        {MASS_RATIO_ID_NAME,           RAIL_SLAB_FEM_PART_NAME, SLAB_MASS_RATIO_EXP_NAME},
        {STIFFNESS_RATIO_ID_NAME,      RAIL_SLAB_FEM_PART_NAME, SLAB_STIFFNESS_RATIO_EXP_NAME},
        {DAMPING_RATIO_ID_NAME,        RAIL_SLAB_FEM_PART_NAME, SLAB_DAMPING_RATIO_EXP_NAME},
        {ELASTIC_MODULUS_ID_NAME,      RAIL_SLAB_FEM_PART_NAME, SLAB_ELASTIC_MODULUS_EXP_NAME},
        {POISSON_RATIO_ID_NAME,        RAIL_SLAB_FEM_PART_NAME, SLAB_POISSON_RATIO_EXP_NAME},
    };
    // Construct the slab component with its attribute/expression table.
    Slab::Slab() : BaseComponent(attrExpInfo, N_ELEMENTS(attrExpInfo)),
        m_oldSupportCount(0)
    {
    }
    Slab::~Slab()
    {
    }
    // Snapshot the current support count so CanUpdateBraseConnection() can
    // later detect whether the user changed it.
    void Slab::OnInit()
    {
        m_oldSupportCount = GetSupportCount();
    }
int Slab::GetSupportCount() const
{
Expression *pSupportCntExp = NULL;
if (!m_compAttrs.empty())
pSupportCntExp = GetExpression(m_compAttrs[0].m_partName, m_compAttrs[0].m_expName);
return pSupportCntExp ? pSupportCntExp->IntegerValue() : 0;
}
    // Slab parameter edits never require rebuilding the rail/slab FE model.
    bool Slab::CanUpdateRailSlabFEModel() const
    {
        return false;
    }
    // Slab parameter edits never require rebuilding the brace FE model.
    bool Slab::CanUpdateBraseFEModel() const
    {
        return false;
    }
    // Slab parameter edits never require rebuilding the rail-slab connection.
    bool Slab::CanUpdateRailSlabConnection() const
    {
        return false;
    }
    // The slab-base connection must be rebuilt only when the number of
    // supports changed since OnInit().
    bool Slab::CanUpdateBraseConnection() const
    {
        return GetSupportCount() != m_oldSupportCount;
    }
}<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/include/NXVsdane.hxx
// NXDotNetWrap.h
#ifndef VSDANE_NXVSDANE_HXX_INCLUDED
#define VSDANE_NXVSDANE_HXX_INCLUDED
#pragma once
#pragma unmanaged
#ifdef LIBNXVSDANE
#define LIBNXVSDANEEXPORT __declspec(dllexport)
#else
#define LIBNXVSDANEEXPORT __declspec(dllimport)
#endif
#include <string>
//#include <uf_defs.h>
//namespace NXOpen
//{
// class Expression;
//
// namespace CAE
// {
// class IFEModel;
// class CAEFace;
// }
//}
// Local mirror of the NX UF object identifier type, so this header does not
// need to include uf_defs.h across the managed/unmanaged boundary.
typedef unsigned int tag_t;
namespace Vsdane
{
    //LIBNXVSDANEEXPORT void CreateSweptMesh(NXOpen::CAE::IFEModel *pFeModel,
    //    const std::string &meshColName, const std::string &meshName,
    //    NXOpen::CAE::CAEFace* pSrcFace, NXOpen::CAE::CAEFace *pTargetFace, NXOpen::Expression *pEleSize);
    //LIBNXVSDANEEXPORT void CreateSweptMesh(void *pFeModel,
    //    const std::string &meshColName, const std::string &meshName,
    //    void* pSrcFace, void *pTargetFace, void *pEleSize);
    // Create a swept mesh between a source and a target face of the given FE
    // model.  All objects are passed as raw NX tags (rather than NXOpen
    // pointers, see the commented-out earlier signatures above), presumably to
    // keep NXOpen types out of this export boundary -- confirm with callers.
    LIBNXVSDANEEXPORT void CreateSweptMesh(tag_t tFeModel,
        const std::string &meshColName, const std::string &meshName,
        tag_t tSrcFace, tag_t tTargetFace, tag_t tpEleSize);
}
#endif //VSDANE_NXVSDANE_HXX_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Names.hxx
#ifndef VSAR_NAMES_H_INCLUDED
#define VSAR_NAMES_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
namespace Vsar
{
    // Central catalogue of every well-known name used by the Vsar application:
    // part/body/mesh names, expression ids, solver file names, record-name
    // patterns and component layers.  These identifiers are referenced from
    // many translation units -- do not rename the constants or change their
    // string values without auditing all callers and the solver file formats.
    const char * const SLAB_PART_NAME = "slab.prt";
    const char * const SLAB_BODY_NAME = "SLAB_BODY";
    const char * const SLAB_ASSEM_NAME = "slabs.prt";
    const char * const SLAB_ELEMENT_SIZE_NAME = "Slab_Element_Size";
    const char * const FACE_NAME_TOP = "TOP_FACE";
    const char * const FACE_NAME_BOTTOM = "BOTTOM_FACE";
    const char * const FACE_NAME_FRONT = "FRONT_FACE";
    const char * const FACE_NAME_REAR = "REAR_FACE";
    const char * const TUNNEL_FACE_NAME_TOP = "TUNNEL_TOP_FACE";
    const char * const TUNNEL_FACE_NAME_BOTTOM = "TUNNEL_BOTTOM_FACE";
    // Bridge
    const char * const BRIDGE_BEAM_PART_NAME = "beam.prt";
    const char * const BRIDGE_BEAM_BODY_NAME = "BRIDGE_BEAM_BODY";
    const char * const BRIDGE_MESH_NAME = "Bridge_Mesh";
    const char * const BRIDGE_MESH_COLLECTOR_NAME = "Bridge_Mesh_Collector";
    const char * const BRIDGE_ELEMENT_SIZE_NAME = "Bridge_Element_Size";
    const char * const CARRIAGE_PART_NAME = "carriage.prt";
    const char * const CARRIAGE_BODY_NAME = "CARRIAGE_BODY";
    // Rail
    const char * const RAIL_CONNECTION_NAME = "RAIL_CONNECT_POINT";
    const char * const RAIL_MESH_NAME = "Rail_Mesh";
    const char * const RAIL_SLAB_CONNECTION_MESH_NAME = "Rail_Slab_Connection_Mesh";
    const char * const RAIL_SLAB_CONNECTION_COLLECTOR_NAME = "Rail_Slab_Connection_Collector";
    const char * const RAIL_SLAB_CONNECTION_NAME = "RAIL_SLAB_CONNECTION";
    // Slab
    const char * const SLAB_CONNECT_TO_RAIL_NAME = "SLAB_CONNECT_TO_RAIL_POINT";
    const char * const SLAB_MESH_NAME = "Slab_Mesh";
    const char * const SLAB_MESH_COLLECTOR_NAME = "Slab_Mesh_Collector";
    const char * const SLAB_BASE_CONNECTION_MESH_NAME = "Slab_Base_Connection_Mesh";
    const char * const SLAB_BASE_CONNECTION_COLLECTOR_NAME = "Slab_Base_Connection_Collector";
    const char * const SLAB_BASE_CONNECTION_NAME = "SLAB_BASE_CONNECTION";
    const char * const SLAB_CENTER_POINT_NAME = "SLAB_CENTER_POINT";
    // Base
    const char * const BASE_MESH_COLLECTOR_NAME = "Base_Mesh_Collector";
    const char * const BASE_BODY_NAME = "BASE_BODY";
    // Tunnel
    const char * const TUNNEL_CONCRETE_MESH_NAME = "Concrete_Tunnel_Mesh";
    // Part Name
    const char * const RAIL_SLAB_FEM_PART_NAME = "RailSlab_f";
    const char * const BRIDGE_FEM_PART_NAME = "Bridge_f";
    const char * const BASE_FEM_PART_NAME = "Base_f";
    const char * const TUNNEL_FEM_PART_NAME = "Tunnel_f";
    const char * const SLAB_PRT_PART_NAME = "slab";
    const char * const SLABS_PRT_PART_NAME = "slabs";
    const char * const BEAM_PRT_PART_NAME = "beam";
    const char * const BRIDGE_PRT_PART_NAME = "bridge";
    const char * const BASE_PRT_PART_NAME = "base";
    const char * const TUNNEL_PRT_PART_NAME = "tunnel";
    const char * const TRAIN_PRT_PART_NAME = "train";
    const char * const CARRIAGE_PRT_PART_NAME = "carriage";
    //////////////////////////////////////////////////////////////////////////
    // Expression Name-Value
    // Rail
    const char * const LINEAR_DENSITY_ID_NAME = "linearDensity"; // common id name
    const char * const RAIL_LINEAR_DENSITY_EXP_NAME = "Rail_Linear_Density";
    const char * const RAIL_MASS_DENSITY_EXP_NAME = "Rail_Mass_Density";
    const char * const ELASTIC_MODULUS_ID_NAME = "elasticModulus"; // common id name
    const char * const RAIL_ELASTIC_MODULUS_EXP_NAME = "Rail_Elastic_Modulus";
    const char * const POISSON_RATIO_ID_NAME = "poissonRatio"; // common id name
    const char * const RAIL_POISSON_RATIO_EXP_NAME = "Rail_Poisson_Ratio";
    const char * const RAIL_ELEMENT_SIZE_EXP_NAME = "Rail_Element_Size";
    const char * const RAIL_SECTION_INERTIA_EXP_NAME = "Rail_Section_Inertia";
    // Slab
    const char * const SLAB_SUPPORT_COUNT_ID_NAME = "supportCnt";
    const char * const SLAB_SUPPORT_COUNT_EXP_NAME = "Support_Count";
    const char * const MASS_RATIO_ID_NAME = "massRatio"; // common id name
    const char * const SLAB_MASS_RATIO_EXP_NAME = "Slab_Mass_Ratio";
    const char * const STIFFNESS_RATIO_ID_NAME = "stiffnessRatio"; // common id name
    const char * const SLAB_STIFFNESS_RATIO_EXP_NAME = "Slab_Stiffness_Ratio";
    const char * const DAMPING_RATIO_ID_NAME = "dampingRatio"; // common id name
    const char * const SLAB_DAMPING_RATIO_EXP_NAME = "Slab_Damping_Ratio";
    const char * const FASTENER_STIFFNESS_ID_NAME = "fastenerStiffness"; // common id name
    const char * const SLAB_FASTENER_STIFFNESS_EXP_NAME = "Slab_Fastener_Stiffness";
    const char * const FASTENER_DAMPING_ID_NAME = "fastenerDamping"; // common id name
    const char * const SLAB_FASTENER_DAMPING_EXP_NAME = "Slab_Fastener_Damping";
    const char * const SLAB_ELASTIC_MODULUS_EXP_NAME = "Slab_Elastic_Modulus";
    const char * const SLAB_POISSON_RATIO_EXP_NAME = "Slab_Poisson_Ratio";
    const char * const SLAB_LENGTH_EXP_NAME = "Slab_Length";
    const char * const SLAB_WIDTH_EXP_NAME = "Slab_Width";
    const char * const SLAB_ELEMENT_SIZE_EXP_NAME = "Slab_Element_Size";
    const char * const SLAB_COUNT_EXP_NAME = "Slab_Count";
    const char * const SLAB_SECTION_INERTIA_EXP_NAME = "Slab_Section_Inertia";
    // Bridge
    const char * const BRIDGE_ELASTIC_MODULUS_EXP_NAME = "Bridge_Elastic_Modulus";
    const char * const BRIDGE_POISSON_RATIO_EXP_NAME = "Bridge_Poisson_Ratio";
    const char * const BRIDGE_MASS_DENSITY_ID_NAME = "massDensity"; // common id name
    const char * const BRIDGE_MASS_DENSITY_EXP_NAME = "Bridge_Mass_Density";
    const char * const WIDTH_ID_NAME = "width"; // common id name
    const char * const WIDTH_EXP_NAME = "Width";
    const char * const HEIGHT_ID_NAME = "height"; // common id name
    const char * const HEIGHT_EXP_NAME = "Height";
    const char * const SPAN_COUNT_ID_NAME = "spanCnt"; // common id name
    const char * const SPAN_COUNT_EXP_NAME = "Beam_Span";
    const char * const SECTION_AREA_EXP_NAME = "Section_Area";
    const char * const BRIDGE_SECTION_INERTIA_EXP_NAME = "Bridge_Section_Inertia";
    // Tunnel
    const char * const DIAMETER_ID_NAME = "diameter"; // common id name
    const char * const DIAMETER_EXP_NAME = "Diameter";
    const char * const TUNNEL_H1_ID_NAME = "h1";
    const char * const TUNNEL_H1_EXP_NAME = "Height_Top";
    const char * const TUNNEL_H2_ID_NAME = "h2";
    const char * const TUNNEL_H2_EXP_NAME = "Height_Mid";
    const char * const TUNNEL_H3_ID_NAME = "h3";
    const char * const TUNNEL_H3_EXP_NAME = "Height_Bottom";
    const char * const TUNNEL_CONCRETE_ELASTIC_MODULUS_EXP_NAME = "Tunnel_Concrete_Elastic_Modulus";
    const char * const TUNNEL_CONCRETE_MASS_DENSITY_EXP_NAME = "Tunnel_Concrete_Mass_Density";
    const char * const TUNNEL_SECTION_INERTIA_EXP_NAME = "Tunnel_Section_Inertia";
    // Base
    const char * const BASE_ELASTIC_MODULUS_EXP_NAME = "Base_Elastic_Modulus";
    const char * const BASE_MASS_DENSITY_EXP_NAME = "Base_Mass_Density";
    const char * const BASE_SECTION_INERTIA_EXP_NAME = "Base_Section_Inertia";
    // Train
    const char * const TRAIN_CARRIAGE_COUNT_ID_NAME = "carriageCount";
    const char * const TRAIN_CARRIAGE_COUNT_EXP_NAME = "Carriage_Count";
    const char * const TRAIN_CARRIAGE_DISTANCE_ID_NAME = "carriageDistance";
    const char * const TRAIN_CARRIAGE_DISTANCE_EXP_NAME = "Distance";
    const char * const TRAIN_CARRIAGE_WEIGHT_ID_NAME = "carriageWeight";
    const char * const TRAIN_CARRIAGE_WEIGHT_EXP_NAME = "Weight";
    const char * const TRAIN_CARRIAGE_LENGTH_EXP_NAME = "Length";
    const char * const TRAIN_BOGIE_LENGTH_ID_NAME = "bogieLength";
    const char * const TRAIN_BOGIE_LENGTH_EXP_NAME = "BogieLength";
    const char * const TRAIN_HALF_BOGIE_DISTANCE_ID_NAME = "halfBogieDistance";
    const char * const TRAIN_HALF_BOGIE_DISTANCE_EXP_NAME = "HalfBogieDistance";
    const char * const TRAIN_BOGIE_WEIGHT_ID_NAME = "bogieWeight";
    const char * const TRAIN_BOGIE_WEIGHT_EXP_NAME = "BogieWeight";
    const char * const TRAIN_WHEELSET_INTERVAL_ID_NAME = "wheelSetInterval";
    const char * const TRAIN_WHEELSET_INTERVAL_EXP_NAME = "WheelSetInterval";
    const char * const TRAIN_WHEELSET_WEIGHT_ID_NAME = "wheelSetWeight";
    const char * const TRAIN_WHEELSET_WEIGHT_EXP_NAME = "WheelSetWeight";
    const char * const TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_ID_NAME = "wheelSetSglStgSuspStiff";
    const char * const TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_EXP_NAME = "WheelSetSglStgSuspStiff";
    const char * const TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_ID_NAME = "wheelSetSglStgSuspDamp";
    const char * const TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_EXP_NAME = "WheelSetSglStgSuspDamp";
    const char * const TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_ID_NAME = "wheelSetTwoStgSuspStiff";
    const char * const TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_EXP_NAME = "WheelSetTwoStgSuspStiff";
    const char * const TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_ID_NAME = "wheelSetTwoStgSuspDamp";
    const char * const TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_EXP_NAME = "WheelSetTwoStgSuspDamp";
    const char * const TRAIN_WHEELSET_CONTACT_COEF_ELAST_ID_NAME = "wheelSetContactCoefElast";
    const char * const TRAIN_WHEELSET_CONTACT_COEF_ELAST_EXP_NAME = "WheelSetContactCoefElast";
    //////////////////////////////////////////////////////////////////////////
    // Solution
    //////////////////////////////////////////////////////////////////////////
    const char * const TRAIN_SPEED_ID_NAME = "trainSpeed";
    const char * const TRAIN_SPEED_EXP_NAME = "Train_Speed";
    const char * const COMPUTE_TIME_STEP_ID_NAME = "timeStep";
    const char * const COMPUTE_TIME_STEP_EXP_NAME = "Compute_Time_Step";
    const char * const NUM_OF_TIME_STEPS_EXP_NAME = "Num_Of_Time_Steps";
    // Group Name
    const char * const ELEMENT_FOR_RESPONSE_GROUP_NAME = "ElementForResponse";
    const char * const NODE_FOR_RESPONSE_GROUP_NAME = "NodeForResponse";
    const char * const NODES_FOR_NOISE_GROUP_NAME = "NodesForNoise";
    const char * const RESPONSE_STRUCTURAL_OUTPUT_OBJECT_NAME = "Response_Structural_Output";
    const char * const NOISE_STRUCTURAL_OUTPUT_OBJECT_NAME = "Noise_Structural_Output";
    const char * const TIME_STEP_OUTPUT_OBJECT_NAME = "Time_Step";
    // Constraint Name
    const char * const FRONT_CONSTRAINT_NAME = "FrontConstraint";
    const char * const MIDDLE_CONSTRAINT_NAME = "MiddleConstraint";
    const char * const REAR_CONSTRAINT_NAME = "RearConstraint";
    const char * const BOTTOM_CONSTRAINT_NAME = "BottomConstraint";
    const char * const VERTICAL_CONSTRAINT_NAME = "VerticalConstraint";
    //////////////////////////////////////////////////////////////////////////
    // Binary Folder Name
    const char * const SOLVER_FOLDER_NAME = "bin";
    const char * const SOLVER_DATA_FOLDER_NAME = "data";
    //const char * const SOLVER_ELASTIC_EXE_NAME = "elastic.exe";
    const char * const SOLVER_ELASTIC_EXE_NAME = "ForceActOnRail.exe";
    const char * const SOLVER_ELASTIC_CONVERT_EXE_NAME = "read_excitation.exe";
    const char * const SOLVER_NOISE_EXE_NAME = "noise.exe";
    // log file
    const char * const SOLVE_ELASTIC_SUCCESS_LOG_NAME = "excitation_finished.log";
    const char * const SOLVE_ELASTIC_FAIL_LOG_NAME = "excitation_error.log";
    const char * const SOLVE_CONVERT_ELASTIC_FAIL_LOG_NAME = "read_excitation_error.log";
    const char * const SOLVE_NOISE_FAIL_LOG_NAME = "rail_noise_error.log";
    //////////////////////////////////////////////////////////////////////////
    // Excitation input names
    const char * const IRR_DATA_FILE_NAME = "irr.dat";
    const char * const VEHICLE_INPUT_FILE_NAME = "vehicle.dat";
    const char * const RAIL_INPUT_FILE_NAME = "rail.dat";
    const char * const SLAB_INPUT_FILE_NAME = "fslab.dat";
    const char * const BEAM_INPUT_FILE_NAME = "beam.dat";
    const char * const CALCULATION_INPUT_FILE_NAME = "calculation.dat";
    //////////////////////////////////////////////////////////////////////////
    // Excitation output names
    const char * const VEHICLE_OUTPUT_FILE_NAME = "vehicle_out.dat";
    const char * const WHEEL_OUTPUT_FILE_NAME = "wheel_out.dat";
    const char * const TURN_OUTPUT_FILE_NAME = "turn_out.dat";
    // Read_excitation input names
    const char * const CONVERT_EXCITATION_INPUT_FILE_NAME = "excitation_nodes.dat";
    // Response result record name (boost::format style placeholders)
    const char * const RESPONSE_RESULT_RECORD_PATTERN_NAME = "%1%-Node-%2%-%3%";
    // Noise input names
    // NOTE: "FREQUENCE" is a historical misspelling of "frequency"; the
    // identifier is kept as-is because it is referenced elsewhere.
    const char * const NOISE_FREQUENCE_INPUT_FILE_NAME = "node%1%.txt";
    const char * const NOISE_COORDINATE_INPUT_FILE_NAME = "output_points.dat";
    const char * const NOISE_TIME_OUTPUT_FILE_NAME = "Rail_Noise_Time%02d.out";
    const char * const NOISE_FREQ_OUTPUT_FILE_NAME = "Rail_Noise_Freq%02d.out";
    // Noise result record name (printf style placeholders)
    const char * const NOISE_RESULT_TIME_RECORD_PATTERN_NAME = "Time_SoundPressure_(%.2f,%.2f,%.2f)";
    const char * const NOISE_RESULT_FREQ_RECORD_PATTERN_NAME = "Freq_SoundPressure_(%.2f,%.2f,%.2f)";
    const char * const NASTRAN_PCH_RESULT_FILE_PATTERN_NAME = "%1%_s-%2%.pch";
    // Find Object name
    const char * const FIND_MESH_PATTERN_NAME = "Mesh[%1%]";
    const char * const FIND_MESH_OCC_PATTERN_NAME = "MeshOccurrence[%1%]";
    const char * const FIND_MESH_COL_PATTERN_NAME = "MeshCollector[%1%]";
    const char * const FIND_MESH_COL_OCC_PATTERN_NAME = "MeshCollectorOccurrence[%1%]";
    const char * const FIND_MODELING_OBJ_PATTERN_NAME = "SsmoPropTable[%1%]";
    const char * const FIND_SOLUTION_PATTERN_NAME = "Solution[%1%]";
    const char * const FIND_CONSTRAINT_PATTERN_NAME = "Constraint[%1%]";
    // millimetre-to-metre conversion factor
    const double mmToMConvert = 0.001;
    // Dedicated NX layers for the generated connection points.
    enum CompLayer
    {
        RAIL_CONNECTION_POINT_LAYER = 128,
        SLAB_CONNECT_TO_RAIL_POINT_LAYER = 129,
        SLAB_CONNECT_TO_BASE_LEFT_POINT_LAYER = 130,
        SLAB_CONNECT_TO_BASE_RIGHT_POINT_LAYER = 131,
        BASE_CONNECT_TO_SLAB_LEFT_POINT_LAYER = 132,
        BASE_CONNECT_TO_SLAB_RIGHT_POINT_LAYER = 133
    };
    // Number of noise datum points expected in the NodesForNoise group.
    const unsigned int NOISE_DATUM_POINT_COUNT = 14;
}
#endif //VSAR_NAMES_H_INCLUDED
<file_sep>/src/NXVsar/src/vsar/Vsar_SolveOperation.cxx
#include <uf_defs.h>
#include <Vsar_SolveOperation.hxx>
#include <cstdlib>
#include <cmath>
#include <regex>
#include <fstream>
#include <iomanip>
#include <boost/filesystem.hpp>
#include <boost/scope_exit.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/shared_array.hpp>
#include <boost/cast.hpp>
#include <boost/foreach.hpp>
#include <boost/format.hpp>
#include <uf_unit_types.h>
#include <uf.h>
#include <uf_sf.h>
#include <uf_vec.h>
#include <NXOpen/Session.hxx>
#include <NXOpen/PartCollection.hxx>
#include <NXOpen/Part.hxx>
#include <NXOpen/Expression.hxx>
#include <NXOpen/ExpressionCollection.hxx>
#include <NXOpen/UnitCollection.hxx>
#include <NXOpen/Point.hxx>
#include <NXOpen/PointCollection.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/NXObjectManager.hxx>
#include <NXOpen/CAE_FTK_DataManager.hxx>
#include <NXOpen/CAE_CaeGroup.hxx>
#include <NXOpen/CAE_CAEBody.hxx>
//#include <NXOpen/CAE_CAEFace.hxx>
#include <NXOpen/CAE_CaeGroupCollection.hxx>
#include <NXOpen/CAE_FemPart.hxx>
#include <NXOpen/CAE_SimPart.hxx>
#include <NXOpen/CAE_SimSimulation.hxx>
#include <NXOpen/CAE_SimSolution.hxx>
#include <NXOpen/CAE_SimConstraint.hxx>
#include <NXOpen/CAE_SimConstraintCollection.hxx>
#include <NXOpen/CAE_SetManager.hxx>
#include <NXOpen/CAE_FEModel.hxx>
#include <NXOpen/CAE_FEModelOccurrence.hxx>
#include <NXOpen/CAE_Mesh.hxx>
#include <NXOpen/CAE_IMeshManager.hxx>
#include <NXOpen/CAE_FENode.hxx>
#include <NXOpen/CAE_FENodeLabelMap.hxx>
#include <NXOpen/CAE_ModelingObjectPropertyTable.hxx>
#include <NXOpen/CAE_ModelingObjectPropertyTableCollection.hxx>
#include <NXOpen/CAE_PropertyTable.hxx>
#include <NXOpen/CAE_AfuManager.hxx>
#include <NXOpen/CAE_AfuDataConvertor.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Project.hxx>
#include <Vsar_Component.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Result.hxx>
#include <Vsar_Utils.hxx>
using namespace boost;
using namespace NXOpen;
using namespace NXOpen::CAE;
using namespace Vsar;
//#pragma warning(push)
//#pragma warning(disable: 4355)
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
    // Rebuilds the "NodesForNoise" group so it contains the FE nodes at the
    // 14 noise datum points on the slab top faces (see Update()).
    class NoiseDatumPointsUpdater
    {
    public:
        NoiseDatumPointsUpdater()
        {
        }
        ~NoiseDatumPointsUpdater()
        {
        }
        // Refresh the noise node group if its membership count is wrong.
        void Update();
        // Point named SLAB_CENTER_POINT_NAME in the sim part, or NULL.
        Point* GetSlabCenter() const;
    protected:
        // Node occurrences matched to the geometric datum points.
        std::vector<FENode*> GetDatumNodes() const;
        // Top faces of the slab body in the rail/slab FEM part.
        std::vector<CAEFace*> GetSlabFaces() const;
        // All nodes on the slab top faces (matching candidates).
        std::vector<FENode*> GetCandidateNodes() const;
        // Geometric coordinates of the 14 datum points.
        std::vector<Point3d> GetDatumPoints() const;
        // Slab dimensions: X = width, Z = length.
        Point3d GetSlabDim() const;
    };
void NoiseDatumPointsUpdater::Update()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
CaeGroup *pGroup = pSimPart->CaeGroups()->FindObject(NODES_FOR_NOISE_GROUP_NAME);
size_t numGroupMember = pGroup->GetEntities().size();
// Update noise datum points manually
if (numGroupMember != NOISE_DATUM_POINT_COUNT)
{
std::vector<FENode*> datumNodes(GetDatumNodes());
if (datumNodes.size() == NOISE_DATUM_POINT_COUNT)
{
std::vector<TaggedObject*> datumNodeTags;
datumNodeTags.resize(datumNodes.size());
std::copy(datumNodes.begin(), datumNodes.end(), datumNodeTags.begin());
pGroup->SetEntities(datumNodeTags);
}
else
throw NXException::Create("Failed to update noise datum points.");
}
}
    // Match each geometric datum point to the candidate FE node at (nearly)
    // the same coordinates, then map the matched prototype nodes to their
    // occurrences in the sim FE model.  Returns an empty vector when not all
    // NOISE_DATUM_POINT_COUNT points can be matched.
    std::vector<FENode*> NoiseDatumPointsUpdater::GetDatumNodes() const
    {
        std::vector<Point3d> datumPts(GetDatumPoints());
        std::vector<FENode*> candidateNodes(GetCandidateNodes());
        std::vector<FENode*> datumNodes;
        datumNodes.reserve(datumPts.size());
        // Scan candidates, stopping early once every datum point has a node.
        for (int idx = 0; idx < int(candidateNodes.size()) && datumNodes.size() != datumPts.size(); idx++)
        {
            Point3d candidatePt(candidateNodes[idx]->Coordinates());
            for (int jdx = 0; jdx < int(datumPts.size()); jdx++)
            {
                double distance = 0.0;
                UF_VEC3_distance(reinterpret_cast<double*>(&datumPts[jdx]),
                    reinterpret_cast<double*>(&candidatePt), &distance);
                if (distance < 0.0000001)   // coincidence tolerance (model units)
                {
                    datumNodes.push_back(candidateNodes[idx]);
                    break;
                }
            }
        }
        if (datumNodes.size() != NOISE_DATUM_POINT_COUNT)
            return std::vector<FENode*>();
        BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
        SimPart *pSimPrt = pPrjProp->GetSimPart();
        FEModelOccurrence *pSimFEModel = pSimPrt->Simulation()->Femodel();
        // Use the owning FE-model occurrence of the first matched node
        // (presumably the rail/slab model) to resolve label offsets.
        FEModelOccurrence *pRailFEModelOcc = GetFEModelOccOfNode(pSimFEModel, datumNodes.front());
        return GetNodeOcc(pSimFEModel, GetNodeOffset(pRailFEModelOcc), datumNodes);
    }
std::vector<Point3d> NoiseDatumPointsUpdater::GetDatumPoints() const
{
std::vector<Point3d> datumPts;
datumPts.reserve(NOISE_DATUM_POINT_COUNT);
Point3d slabDim(GetSlabDim());
Point3d slabCenter(GetSlabCenter()->Coordinates());
for (int idx = 0; idx < NOISE_DATUM_POINT_COUNT/2; idx++)
{
datumPts.push_back(Point3d(slabCenter.X - slabDim.X/2, slabCenter.Y, slabCenter.Z - slabDim.Z/2 + idx * slabDim.Z/6));
datumPts.push_back(Point3d(slabCenter.X + slabDim.X/2, slabCenter.Y, slabCenter.Z - slabDim.Z/2 + idx * slabDim.Z/6));
}
return datumPts;
}
Point* NoiseDatumPointsUpdater::GetSlabCenter() const
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
PointCollection *pPoints = pSimPart->Points();
std::string pointName(SLAB_CENTER_POINT_NAME);
// Get points
for (PointCollection::iterator iter = pPoints->begin(); iter != pPoints->end(); ++iter)
{
if (pointName.compare((*iter)->Name().GetText()) == 0)
return *iter;
}
return NULL;
}
Point3d NoiseDatumPointsUpdater::GetSlabDim() const
{
Expression *pLengthExp = BaseComponent::GetExpression(SLAB_PRT_PART_NAME, SLAB_LENGTH_EXP_NAME);
Expression *pWidghExp = BaseComponent::GetExpression(SLAB_PRT_PART_NAME, SLAB_WIDTH_EXP_NAME);
return Point3d(pWidghExp->Value(), 0.0, pLengthExp->Value());
}
std::vector<FENode*> NoiseDatumPointsUpdater::GetCandidateNodes() const
{
std::vector<CAEFace*> slabTopFaces(GetSlabFaces());
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
SimPart *pSimPrt = pPrjProp->GetSimPart();
return GetNodesOnFace(pSimPrt, slabTopFaces);
}
std::vector<CAEFace*> NoiseDatumPointsUpdater::GetSlabFaces() const
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
FemPart *pSlabFem = pPrjProp->GetRailSlabFemPart();
return GetCaeFacesOfBodyByName(pSlabFem, SLAB_BODY_NAME, FACE_NAME_TOP);
}
    // Re-applies the simulation boundary constraints (front/middle/rear or
    // bottom, depending on project type, plus the global vertical constraint).
    class ConstraintUpdater
    {
    public:
        ConstraintUpdater()
        {
        }
        ~ConstraintUpdater()
        {
        }
        // Rebuild all constraints for the current project type.
        void Update();
    protected:
        // Attach the named constraint to the given node occurrences.
        void SetConstraint(SimSimulation *pSim, const std::string &constraintName, const std::vector<FENode*> &pNodes);
        // Resolve conflicts between overlapping constraints.
        void ResolveConflicts(SimSimulation *pSim);
        // Node occurrences lying on the given prototype faces.
        std::vector<FENode*> GetNodeOccsOnFace(const std::vector<CAEFace*> &pFaceProtos) const;
        // Every node in the FE model.
        std::vector<FENode*> GetAllNodes(IFEModel *pFEModel) const;
        // Front-most face, middle support faces and rear-most face of the bridge beams.
        void GetBridgeFaces(FemPart *pPrt, CAEFace **ppFrontFace,
            std::vector<CAEFace*> &middleFaces, CAEFace **ppRearFace);
        CAEFace* GetBridgeFrontFace(const std::vector<CAEBody*> &pBodies);
        std::vector<CAEFace*> GetBridgeMiddleFaces(const std::vector<CAEBody*> &pBodies,
            CAEFace *pFrontFace, CAEFace *pRearFace);
        CAEFace* GetBridgeRearFace(const std::vector<CAEBody*> &pBodies);
    };
void ConstraintUpdater::Update()
{
// check constraints
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
FemPart *pBracePart = pPrjProp->GetBraceFemPart();
SimPart *pSimPart = pPrjProp->GetSimPart();
SimSimulation *pSim = pSimPart->Simulation();
std::vector<CAEFace*> pFaceProtos;
std::vector<FENode*> pNodes;
switch (pPrjProp->GetProjectType())
{
case Project::ProjectType_Bridge:
{
CAEFace *pFrontFace = NULL;
CAEFace *pRearFace = NULL;
std::vector<CAEFace*> pMiddleFaces;
GetBridgeFaces(pBracePart, &pFrontFace, pMiddleFaces, &pRearFace);
// Set Front face Constraint
pFaceProtos.clear();
pFaceProtos.push_back(pFrontFace);
pNodes = GetNodeOccsOnFace(pFaceProtos);
SetConstraint(pSim, FRONT_CONSTRAINT_NAME, pNodes);
// Set Middle faces Constraint
pFaceProtos.clear();
pFaceProtos = pMiddleFaces;
pNodes = GetNodeOccsOnFace(pFaceProtos);
SetConstraint(pSim, MIDDLE_CONSTRAINT_NAME, pNodes);
// Set Rear face Constraint
pFaceProtos.clear();
pFaceProtos.push_back(pRearFace);
pNodes = GetNodeOccsOnFace(pFaceProtos);
SetConstraint(pSim, REAR_CONSTRAINT_NAME, pNodes);
}
break;
case Project::ProjectType_Selmi_Infinite:
pFaceProtos = GetCaeFacesOfBodyByName(pBracePart, BASE_BODY_NAME, FACE_NAME_BOTTOM);
pNodes = GetNodeOccsOnFace(pFaceProtos);
SetConstraint(pSim, BOTTOM_CONSTRAINT_NAME, pNodes);
break;
case Project::ProjectType_Tunnel:
pFaceProtos = GetCaeFaceByName(pBracePart, TUNNEL_FACE_NAME_BOTTOM);
pNodes = GetNodeOccsOnFace(pFaceProtos);
SetConstraint(pSim, BOTTOM_CONSTRAINT_NAME, pNodes);
break;
default:
break;
}
// Set Vertical Constraint
pNodes = GetAllNodes(pSim->Femodel());
SetConstraint(pSim, VERTICAL_CONSTRAINT_NAME, pNodes);
ResolveConflicts(pSim);
}
    // Orders CAE bodies by the Z coordinate of their volumetric centroid.
    // Used with std::min_element/max_element to find the front-most and
    // rear-most bridge bodies.
    class CaeBodyPosZComparer : public std::binary_function<CAEBody*, CAEBody*, bool>
    {
    public:
        CaeBodyPosZComparer()
        {
        }
        ~CaeBodyPosZComparer()
        {
        }
        bool operator () (CAEBody *pBody1, CAEBody *pBody2) const
        {
            Point3d centroid1(GetCentroidOfBody(pBody1));
            Point3d centroid2(GetCentroidOfBody(pBody2));
            return centroid1.Z < centroid2.Z;
        }
    private:
        // Ask NX for the body's volume and centroid; throws on a UF error.
        // The Point3d is reinterpreted as a double[3] for the UF call.
        Point3d GetCentroidOfBody(CAEBody *pBody) const
        {
            double pdVolume;
            Point3d centroid;
            int rtc = UF_SF_body_ask_volume_and_centroid(pBody->Tag(), &pdVolume, (double*)(&centroid));
            if (rtc != 0)
                throw NXException::Create(rtc);
            return centroid;
        }
    };
void ConstraintUpdater::GetBridgeFaces(FemPart *pPrt, CAEFace **ppFrontFace,
std::vector<CAEFace*> &middleFaces, CAEFace **ppRearFace)
{
std::vector<CAEBody*> pBodies(GetCaeBodyByName(pPrt, BRIDGE_BEAM_BODY_NAME));
*ppFrontFace = GetBridgeFrontFace(pBodies);
*ppRearFace = GetBridgeRearFace(pBodies);
middleFaces = GetBridgeMiddleFaces(pBodies, *ppFrontFace, *ppRearFace);
}
CAEFace* ConstraintUpdater::GetBridgeFrontFace(const std::vector<CAEBody*> &pBodies)
{
if (pBodies.empty())
throw NXException::Create("No bridge geometry exists. The model may be broken.");
CAEBody *pFrontBody = NULL;
if (pBodies.size() > 1)
{
// do filtering
pFrontBody = *(std::min_element(pBodies.begin(), pBodies.end(), CaeBodyPosZComparer()));
}
else
pFrontBody = pBodies.back();
std::vector<CAEFace*> pFaceProtos;
pFaceProtos = GetCaeFaceByName(pFrontBody, FACE_NAME_FRONT);
if (pFaceProtos.size() != 1)
throw NXException::Create("Wrong bridge geometry face information. The model may be broken.");
return pFaceProtos.back();
}
// Returns all faces named FACE_NAME_REAR across the bridge bodies, excluding
// the already-identified front and rear end faces (erase-remove idiom).
std::vector<CAEFace*> ConstraintUpdater::GetBridgeMiddleFaces(const std::vector<CAEBody*> &pBodies,
CAEFace *pFrontFace, CAEFace *pRearFace)
{
std::vector<CAEFace*> pFaceProtos;
// Get all faces
for (int idx = 0; idx < int(pBodies.size()); idx++)
{
std::vector<CAEFace*> pFaces(GetCaeFaceByName(pBodies[idx], FACE_NAME_REAR));
pFaceProtos.insert(pFaceProtos.end(), pFaces.begin(), pFaces.end());
}
// removes front and rear faces
pFaceProtos.erase(std::remove(pFaceProtos.begin(), pFaceProtos.end(), pFrontFace), pFaceProtos.end());
pFaceProtos.erase(std::remove(pFaceProtos.begin(), pFaceProtos.end(), pRearFace), pFaceProtos.end());
return pFaceProtos;
}
// Returns the single rear face of the bridge: the body with the largest
// centroid Z is taken as the rear body, and exactly one face named
// FACE_NAME_REAR must exist on it. Throws NXException otherwise.
// Mirror image of GetBridgeFrontFace (max_element instead of min_element).
CAEFace* ConstraintUpdater::GetBridgeRearFace(const std::vector<CAEBody*> &pBodies)
{
if (pBodies.empty())
throw NXException::Create("No bridge geometry exists. The model may be broken.");
CAEBody *pRearBody = NULL;
if (pBodies.size() > 1)
{
// do filtering: pick the body highest along Z (rear of the bridge)
pRearBody = *(std::max_element(pBodies.begin(), pBodies.end(), CaeBodyPosZComparer()));
}
else
pRearBody = pBodies.back();
std::vector<CAEFace*> pFaceProtos;
pFaceProtos = GetCaeFaceByName(pRearBody, FACE_NAME_REAR);
if (pFaceProtos.size() != 1)
throw NXException::Create("Wrong bridge geometry face information. The model may be broken.");
return pFaceProtos.back();
}
// Maps the prototype nodes lying on the given prototype faces to their node
// occurrences in the sim part's FE model. Returns an empty vector when no
// nodes are found on the faces.
std::vector<FENode*> ConstraintUpdater::GetNodeOccsOnFace(const std::vector<CAEFace*> &pFaceProtos) const
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
SimPart *pSimPrt = pPrjProp->GetSimPart();
std::vector<FENode*> nodeProtos(GetNodesOnFace(pSimPrt, pFaceProtos));
if (nodeProtos.empty())
return std::vector<FENode*>();
FEModelOccurrence *pSimFEModel = pSimPrt->Simulation()->Femodel();
// NOTE(review): the label offset is taken from the FE model occurrence of
// the FIRST prototype node -- assumes all nodes share one occurrence.
FEModelOccurrence *pNodeFEModelOcc = GetFEModelOccOfNode(pSimFEModel, nodeProtos.front());
return GetNodeOcc(pSimFEModel, GetNodeOffset(pNodeFEModelOcc), nodeProtos);
}
// Collects every node of the FE model by walking its label map in ascending
// label order. Iteration stops at the first label that resolves to no node.
std::vector<FENode*> ConstraintUpdater::GetAllNodes(IFEModel *pFEModel) const
{
    std::vector<FENode*> allNodes;
    boost::scoped_ptr<FENodeLabelMap> pLabelMap(pFEModel->FenodeLabelMap());
    allNodes.reserve(pLabelMap->NumNodes());
    // AskNextNodeLabel(0) yields the first used label; each call advances
    // to the next one.
    for (int label = pLabelMap->AskNextNodeLabel(0); ; label = pLabelMap->AskNextNodeLabel(label))
    {
        FENode *pFoundNode = pLabelMap->GetNode(label);
        if (!pFoundNode)
            break;
        allNodes.push_back(pFoundNode);
    }
    return allNodes;
}
// Rewrites target set 0 of the named simulation constraint to contain exactly
// the given nodes.
void ConstraintUpdater::SetConstraint(SimSimulation *pSim, const std::string &constraintName,
const std::vector<FENode*> &pNodes)
{
std::string strConstraint((boost::format(FIND_CONSTRAINT_PATTERN_NAME) % constraintName.c_str()).str());
// NOTE(review): the dynamic_cast result is dereferenced unchecked; if the
// constraint is missing or of another type this crashes -- confirm
// FindObject's failure behavior (it may already throw).
SimConstraint *pConstraint(dynamic_cast<SimConstraint *>(pSim->Constraints()->FindObject(strConstraint)));
SetManager *pSetMgr = pConstraint->TargetSetManager();
std::vector<SetObject> pSetObjs;
pSetObjs.reserve(pNodes.size());
BOOST_FOREACH(FENode *pNode, pNodes)
{
pSetObjs.push_back(SetObject(pNode, CaeSetObjectSubTypeNone, 0));
}
pSetMgr->SetTargetSetMembers(0, pSetObjs);
}
// Activates the VSDANE solution, runs a model update, then asks NX to resolve
// constraint conflicts. Any NXException is deliberately swallowed
// (best-effort operation).
void ConstraintUpdater::ResolveConflicts(SimSimulation *pSim)
{
std::string strSol((boost::format(FIND_SOLUTION_PATTERN_NAME) % VSDANE_SOLUTION_NAME).str());
SimSolution * pSolution(dynamic_cast<SimSolution*>(pSim->FindObject(strSol)));
try
{
pSim->SetActiveSolution(pSolution);
Session *pSession = Session::GetSession();
// Invisible undo mark: the update is not exposed as a user undo step.
Session::UndoMarkId undoMark = pSession->SetUndoMark(Session::MarkVisibilityInvisible, "Resolve Constraint Conflicts");
// NOTE(review): the update error count is discarded -- confirm this is
// intentional (conflicts are resolved on the next line regardless).
int nErrs = pSession->UpdateManager()->DoUpdate(undoMark);
pSolution->ResolveConstraintConflicts();
}
catch (NXException& ex)
{
// Swallowed: Message() is called but its result is unused.
ex.Message();
}
}
// Initializes the solution directory from the current project path; the
// scratch work directory stays empty until CreateWorkDir() runs in Execute().
BaseSolveOperation::BaseSolveOperation() : m_workDir(), m_solDir()
{
// Get result path name
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
m_solDir = filesystem::path(pPrjProp->GetProjectPath());
}
// Trivial destructor; scratch-dir cleanup happens in Execute(), not here.
BaseSolveOperation::~BaseSolveOperation()
{
}
// Template method driving a solve: create a scratch directory, clean stale
// results, run PreExecute()/Solve(), and optionally load results and save the
// sim part. The BOOST_SCOPE_EXIT guard always restores the original working
// directory and (in release builds only) deletes the scratch directory,
// even if a later step throws.
void BaseSolveOperation::Execute()
{
//PreExecute();
filesystem::path oldWorkPath(filesystem::current_path());
CreateWorkDir();
// remove work dir
BOOST_SCOPE_EXIT((&m_workDir)(&oldWorkPath))
{
filesystem::current_path(oldWorkPath);
// Keep the scratch dir around in debug builds for post-mortem inspection.
#if !defined(_DEBUG) && !defined(DEBUG)
try
{
if (filesystem::exists(m_workDir))
filesystem::remove_all(m_workDir);
}
catch (std::exception &)
{
// Best-effort cleanup: a locked file must not mask the solve outcome.
}
#endif
m_workDir.clear();
}
BOOST_SCOPE_EXIT_END
CleanResult();
PreExecute();
// Solve
Solve();
if (CanAutoLoadResult())
{
LoadResult();
// save parts
CAE::SimPart *pSimPrt = Project::Instance()->GetProperty()->GetSimPart();
pSimPrt->Save(BasePart::SaveComponentsTrue, BasePart::CloseAfterSaveFalse);
}
}
// Deletes an old result file, first asking NX to unload it so the delete does
// not fail on an open handle. Filesystem failures are rethrown as a single
// user-facing NXException.
void BaseSolveOperation::CleanResultFile(const std::string &resultPathName)
{
try
{
if (filesystem::exists(resultPathName))
{
try
{
Session::GetSession()->DataManager()->UnloadFile(resultPathName.c_str());
}
catch(NXException&) // maybe the file is not loaded, delete it anyway
{
}
filesystem::remove_all(resultPathName);
}
}
catch (std::exception &)
{
throw NXException::Create("Failed to clean old result.");
}
}
// Creates a uniquely named scratch directory under the solution directory
// (pattern "<project>_%%%%%%", where boost fills each % with a random hex
// digit) and records it in m_workDir. Also changes the process working
// directory to the scratch parent.
void BaseSolveOperation::CreateWorkDir()
{
filesystem::path strScratchDir(m_solDir); // Use custom scratch dir in future
filesystem::path workFolder;
filesystem::path workPath;
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
std::string workFolderTemp(pPrjProp->GetProjectName() + "_%%%%%%");
// set work dir
filesystem::current_path(strScratchDir);
// Loop guards against the (unlikely) case that the random name collides.
do
{
workFolder = filesystem::unique_path(workFolderTemp);
workPath = strScratchDir / workFolder;
} while (filesystem::exists(workPath));
filesystem::create_directory(workPath);
m_workDir = workPath;
}
// Response solve operation; all state lives in the base class.
SolveResponseOperation::SolveResponseOperation() : BaseSolveOperation()
{
}
// Trivial destructor.
SolveResponseOperation::~SolveResponseOperation()
{
}
// Prepares solver input: first computes the excitation, then converts it to
// the solver's expected format. Order matters: conversion consumes the
// computation's output.
void SolveResponseOperation::PreExecute()
{
ComputeExcitationTask computeExcitation(this);
computeExcitation.Run();
ConvertExcitationTask convertExcitation(this);
convertExcitation.Run();
}
// Removes all stale response artifacts (op2, afu, and the noise intermediate
// result) before a fresh solve.
void SolveResponseOperation::CleanResult()
{
ResponseOp2Result respResult;
CleanResultFile(respResult.GetResultPathName());
ResponseAfuResult respAfuResult(m_workDir.string());
CleanResultFile(respAfuResult.GetResultPathName());
NoiseIntermResult noiseIntermResult;
CleanResultFile(noiseIntermResult.GetResultPathName());
}
// Launches the NX solver on the VSDANE solution, skipping the model setup
// check (the model is assumed valid at this point).
void SolveResponseOperation::Solve()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
SimPart *pSimPart = pPrjProp->GetSimPart();
SimSimulation *pSim = pSimPart->Simulation();
std::string strSol((boost::format(FIND_SOLUTION_PATTERN_NAME) % VSDANE_SOLUTION_NAME).str());
SimSolution * pSolution(dynamic_cast<SimSolution*>(pSim->FindObject(strSol)));
pSolution->Solve(SimSolution::SolveOptionSolve,
SimSolution::SetupCheckOptionDoNotCheck);
}
// Response results are always loaded automatically after the solve.
bool SolveResponseOperation::CanAutoLoadResult() const
{
return true;
}
// Loads the op2 result, creates the afu result from the scratch directory,
// and tries to create the optional noise intermediate result. The project
// status is advanced depending on which results actually exist.
void SolveResponseOperation::LoadResult()
{
ResponseOp2Result respResult;
respResult.Load();
// Get dat path name
ResponseAfuResult respAfuResult(m_workDir.string());
respAfuResult.Create();
// may don't have noise intermediate result
NoiseIntermResult noiseIntermResult;
try
{
noiseIntermResult.Create();
}
catch (NXException &)
{
// does nothing if failed to extract noise intermediate results, the empty result has been deleted
}
// modify project status
if (noiseIntermResult.IsResultExist())
{
// Unload so the file is not kept open by the session after status update.
FTK::DataManager *pDataMgr = Session::GetSession()->DataManager();
pDataMgr->UnloadFile(noiseIntermResult.GetResultPathName().c_str());
Project::GetStatus()->Switch(Status::ProjectStatus_ResponseNoiseSolved);
}
else if (respResult.IsResultExist() && respAfuResult.IsResultExist())
Project::GetStatus()->Switch(Status::ProjectStatus_ResponseSolved);
}
// Noise solve operation; remembers the user-selected output points at which
// noise will be evaluated.
SolveNoiseOperation::SolveNoiseOperation(const std::vector<NXOpen::Point*> &pts)
: BaseSolveOperation(), m_outputPoints(pts)
{
}
// Trivial destructor.
SolveNoiseOperation::~SolveNoiseOperation()
{
}
// Refreshes the noise datum points, then writes the noise solver input
// (FFT-converted response data and output-point coordinates) into the
// scratch directory.
void SolveNoiseOperation::PreExecute()
{
NoiseDatumPointsUpdater datumPtsUpdater;
datumPtsUpdater.Update();
// convert to FFT
NoiseInput noiseInput(m_workDir, m_outputPoints);
noiseInput.Generate();
}
// Deletes any stale noise result before solving.
void SolveNoiseOperation::CleanResult()
{
NoiseResult respResult(m_workDir, m_outputPoints);
CleanResultFile(respResult.GetResultPathName());
}
// Runs the external noise solver executable from the scratch directory.
// Failure is detected both via the process exit code and via the presence of
// the solver's failure log file.
void SolveNoiseOperation::Solve()
{
filesystem::path exePathName = filesystem::path(GetInstallPath()) /
SOLVER_FOLDER_NAME / SOLVER_NOISE_EXE_NAME;
// set work dir
filesystem::current_path(GetWorkDir());
// NOTE(review): std::system's return value is shell/platform dependent;
// the fail-log check below is the more reliable signal.
int rtc = std::system(exePathName.string().c_str());
filesystem::path failLogPath = GetWorkDir() / SOLVE_NOISE_FAIL_LOG_NAME;
if (rtc != 0 || filesystem::exists(failLogPath))
throw NXException::Create("Failed to solve noise.");
}
// Noise results are always loaded automatically after the solve.
bool SolveNoiseOperation::CanAutoLoadResult() const
{
return true;
}
// Creates the noise result from the scratch directory and, if it exists,
// advances the project status to "noise solved".
void SolveNoiseOperation::LoadResult()
{
NoiseResult noiseResult(m_workDir, m_outputPoints);
noiseResult.Create();
// modify project status
if (noiseResult.IsResultExist())
Project::GetStatus()->Switch(Status::ProjectStatus_NoiseSolved);
}
// A task borrows (does not own) the solve operation that provides the work
// and solution directories.
BaseTask::BaseTask(const BaseSolveOperation *solOper) : m_solOper(solOper)
{
}
// Trivial destructor; m_solOper is not owned.
BaseTask::~BaseTask()
{
}
// Template method: clean old outputs, write inputs, run the external
// executable, then copy its outputs to the solution directory.
void BaseTask::Run()
{
CleanResults();
PrepareInput();
CallExecutable();
MoveOutputs();
}
void BaseTask::CleanResults() const
{
std::vector<std::string> results(GetOutputResults());
for (std::vector<std::string>::iterator iter = results.begin(); iter != results.end(); ++iter)
{
filesystem::path resultPath = m_solOper->GetSolutionDir() / *iter;
if (filesystem::exists(resultPath))
filesystem::remove_all(resultPath);
}
}
// Runs the task's external executable from the operation's scratch directory.
// Failure is detected via the exit code or the solver's fail-log file.
void BaseTask::CallExecutable() const
{
filesystem::path exePathName = filesystem::path(GetInstallPath()) /
SOLVER_FOLDER_NAME / GetExecutableName();
// set work dir
filesystem::current_path(m_solOper->GetWorkDir());
int rtc = std::system(exePathName.string().c_str());
// post solve check
filesystem::path failLogPath = m_solOper->GetWorkDir() / GetFailLog();
// NOTE(review): the error text says "excitation" even though this base
// method serves every task type -- confirm the message is acceptable.
if (rtc != 0 || filesystem::exists(failLogPath))
throw NXException::Create("Failed to solve excitation.");
}
// Copies each expected output file from the scratch directory to the solution
// directory; a missing output means the solve failed. Relies on
// CleanResults() having removed the destinations, since copy_file throws if
// the target already exists.
void BaseTask::MoveOutputs() const
{
std::vector<std::string> results(GetOutputResults());
for (std::vector<std::string>::iterator iter = results.begin(); iter != results.end(); ++iter)
{
filesystem::path srcPath = m_solOper->GetWorkDir() / *iter;
filesystem::path dstPath = m_solOper->GetSolutionDir() / *iter;
if (filesystem::exists(srcPath))
filesystem::copy_file(srcPath, dstPath);
else
throw NXException::Create("Failed to solve.");
}
}
// Task that runs the elastic excitation computation executable.
ComputeExcitationTask::ComputeExcitationTask(const BaseSolveOperation *solOper) : BaseTask(solOper)
{
}
// Trivial destructor.
ComputeExcitationTask::~ComputeExcitationTask()
{
}
// Writes the excitation solver's input files (vehicle, rail, slab, beam,
// calculation data) into the scratch directory.
void ComputeExcitationTask::PrepareInput()
{
ExcitationInput excitationInput(m_solOper->GetWorkDir());
excitationInput.Generate();
}
// Name of the elastic excitation solver executable.
std::string ComputeExcitationTask::GetExecutableName() const
{
return SOLVER_ELASTIC_EXE_NAME;
}
// Log file the elastic solver writes on success.
std::string ComputeExcitationTask::GetSuccessLog() const
{
return SOLVE_ELASTIC_SUCCESS_LOG_NAME;
}
// Log file whose presence signals that the elastic solver failed.
std::string ComputeExcitationTask::GetFailLog() const
{
return SOLVE_ELASTIC_FAIL_LOG_NAME;
}
// The excitation computation leaves its outputs in the scratch directory for
// the follow-up ConvertExcitationTask; nothing is copied to the solution
// directory, so the list is deliberately empty. (Dead commented-out entries
// for vehicle/wheel/turn outputs were removed.)
std::vector<std::string> ComputeExcitationTask::GetOutputResults() const
{
    return std::vector<std::string>();
}
// Task that converts computed excitation into solver load files; the node
// label offset is filled in later by GetRailNodes().
ConvertExcitationTask::ConvertExcitationTask(const BaseSolveOperation *solOper) : BaseTask(solOper), m_nodeOffset(0)
{
}
// Trivial destructor.
ConvertExcitationTask::~ConvertExcitationTask()
{
}
// Returns the sim part's top-level FE model occurrence.
// NOTE(review): the loop body only reads child properties into unused locals
// and calls Print(); it looks like leftover debugging (see also the
// commented-out SetLabelOffsets calls) -- candidate for removal once the
// Print() side effect is confirmed unneeded.
FEModelOccurrence* ConvertExcitationTask::GetRailFEModelOcc() const
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
SimPart *pSimPart = pPrjProp->GetSimPart();
FEModelOccurrence *pSimFEModel = pSimPart->Simulation()->Femodel();
std::vector<FEModelOccurrence*> feModelChildren(pSimFEModel->GetChildren());
if (feModelChildren.size() == 2)
{
for (unsigned int idx = 0; idx < feModelChildren.size(); idx++)
{
std::string strName = feModelChildren[idx]->Name().GetText();
bool isOcc = feModelChildren[idx]->IsOccurrence();
feModelChildren[idx]->Print();
std::string strID = feModelChildren[idx]->JournalIdentifier().GetText();
BasePart *pPrt = feModelChildren[idx]->OwningPart();
Assemblies::Component* pComp = feModelChildren[idx]->OwningComponent();
IFEModel *pParentModel = feModelChildren[idx]->Parent();
}
//feModelChildren[0]->SetLabelOffsets(0, 0, 0);
//feModelChildren[1]->SetLabelOffsets(GetMaxNodeLabel(feModelChildren[0]->FenodeLabelMap()),
// GetMaxElementLabel(feModelChildren[0]->FeelementLabelMap()), 0);
}
return pSimFEModel;
}
// Finds the rail mesh among the child FE model occurrences, records its node
// label offset into m_nodeOffset (side effect), and returns the tags of all
// nodes located on that mesh.
std::vector<tag_t> ConvertExcitationTask::GetRailNodes()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
Mesh *pRailMesh = NULL;
std::string strRailMeshName((boost::format(FIND_MESH_OCC_PATTERN_NAME) % RAIL_MESH_NAME).str());
SimPart *pSimPart = pPrjProp->GetSimPart();
FEModelOccurrence *pSimFEModel = pSimPart->Simulation()->Femodel();
std::vector<FEModelOccurrence*> childFeModelOcc(pSimFEModel->GetChildren());
for (std::vector<FEModelOccurrence*>::iterator iter = childFeModelOcc.begin();
iter != childFeModelOcc.end(); ++iter)
{
IMeshManager *pMeshMgr = (*iter)->MeshManager();
// polymorphic_cast throws when the mesh is absent in this occurrence;
// the catch below just advances to the next child.
try
{
pRailMesh = polymorphic_cast<Mesh*>(pMeshMgr->FindObject(strRailMeshName.c_str()));
int elemOffset = 0;
int csysOffset = 0;
(*iter)->GetLabelOffsets(&m_nodeOffset, &elemOffset, &csysOffset);
break;
}
catch (std::exception&)
{
}
}
//FEModelOccurrence *pRailFEModelOcc = GetFEModelOccByMeshName(RAIL_MESH_NAME);
//int elemOffset = 0;
//int csysOffset = 0;
//pRailFEModelOcc->GetLabelOffsets(&m_nodeOffset, &elemOffset, &csysOffset);
#if 0
FemPart *pFemPart = pPrjProp->GetRailSlabFemPart();
IMeshManager *pMeshMgr = pFemPart->BaseFEModel()->MeshManager();
std::string strRailMeshName(std::string("Mesh[").append(RAIL_MESH_NAME).append("]"));
pRailMesh = polymorphic_cast<Mesh*>(pMeshMgr->FindObject(strRailMeshName.c_str()));
#endif
int nodeCnt = 0;
int errCode = 0;
tag_t *tNodes = NULL;
// NOTE(review): if no child contained the rail mesh, pRailMesh is still
// NULL here and Tag() dereferences it -- confirm the mesh always exists.
errCode = UF_SF_locate_nodes_on_mesh(pRailMesh->Tag(), &nodeCnt, &tNodes);
if (errCode != 0)
throw NXException::Create(errCode);
// shared_ptr with UF_free deleter releases the UF-allocated array after
// the vector below has copied the tags.
boost::shared_ptr<tag_t> pNodes(tNodes, UF_free);
return std::vector<tag_t>(tNodes, tNodes + nodeCnt);
}
// Orders node tags by the Z coordinate of the node's position (ascending).
// Used to sort rail nodes along the track before writing solver input.
class NodePosZComparer : public std::binary_function<tag_t, tag_t, bool>
{
public:
NodePosZComparer()
{
}
~NodePosZComparer()
{
}
// Returns true when tNode1's Z coordinate is strictly smaller than tNode2's.
bool operator () (tag_t tNode1, tag_t tNode2) const
{
FENode *pNode = NULL;
pNode = dynamic_cast<FENode*>(NXObjectManager::Get(tNode1));
Point3d absPos1(pNode->Coordinates());
pNode = dynamic_cast<FENode*>(NXObjectManager::Get(tNode2));
Point3d absPos2(pNode->Coordinates());
return absPos1.Z < absPos2.Z;
}
};
// Writes the (sorted) rail node labels, one per line, into the conversion
// executable's input file inside the scratch directory.
void ConvertExcitationTask::WriteInputData(std::vector<tag_t> &railNodes) const
{
std::ofstream inputFile(filesystem::path(m_solOper->GetWorkDir() /
CONVERT_EXCITATION_INPUT_FILE_NAME).string().c_str());
for (std::vector<tag_t>::iterator iter = railNodes.begin(); iter != railNodes.end(); ++iter)
{
FENode *pNode = dynamic_cast<FENode*>(NXObjectManager::Get(*iter));
// has no need to add offset if input rail node is occurrence
int label = pNode->Label()/* + m_nodeOffset*/;
inputFile << label << std::endl;
}
}
// Gathers rail node tags, sorts them along the track (ascending Z), and
// writes them as input for the conversion executable.
void ConvertExcitationTask::PrepareInput()
{
std::vector<tag_t> railNodes(GetRailNodes());
std::sort(railNodes.begin(), railNodes.end(), NodePosZComparer());
WriteInputData(railNodes);
}
// Name of the excitation conversion executable.
std::string ConvertExcitationTask::GetExecutableName() const
{
return SOLVER_ELASTIC_CONVERT_EXE_NAME;
}
// The conversion executable writes no success log.
std::string ConvertExcitationTask::GetSuccessLog() const
{
return "";
}
// Log file whose presence signals that the conversion executable failed.
std::string ConvertExcitationTask::GetFailLog() const
{
return SOLVE_CONVERT_ELASTIC_FAIL_LOG_NAME;
}
// Load files produced by the conversion executable that must be copied from
// the scratch directory into the solution directory (order preserved).
std::vector<std::string> ConvertExcitationTask:: GetOutputResults() const
{
    static const char *const OUTPUT_FILE_NAMES[] =
    {
        "force.dat",
        "moment.dat",
        "dload.dat"
    };
    return std::vector<std::string>(OUTPUT_FILE_NAMES,
        OUTPUT_FILE_NAMES + sizeof(OUTPUT_FILE_NAMES) / sizeof(OUTPUT_FILE_NAMES[0]));
}
// Writes every input file the excitation solver needs into the target
// (scratch) directory: irregularity data plus vehicle/rail/slab/beam and
// calculation parameters.
void ExcitationInput::Generate() const
{
CopyIrrData();
WriteVehicleData();
WriteRailData();
WriteSlabData();
WriteBeamData();
WriteCalculationData();
}
// Copies the shipped track-irregularity data file from the install tree into
// the target directory, replacing any previous copy (copy_file would throw
// if the destination already existed).
void ExcitationInput::CopyIrrData() const
{
filesystem::path irrTmpPath = filesystem::path(GetInstallPath()) /
SOLVER_FOLDER_NAME / SOLVER_DATA_FOLDER_NAME / IRR_DATA_FILE_NAME;
filesystem::path toFilePathName(m_targetDir / IRR_DATA_FILE_NAME);
if (filesystem::exists(toFilePathName))
filesystem::remove_all(toFilePathName);
filesystem::copy_file(irrTmpPath, toFilePathName);
}
// Evaluates each input item's NX expression (converted to the requested
// target unit when one is given) and writes all values comma-separated on a
// single line of the named file. Items whose expression lookup fails are
// treated as literal numbers (m_expName parsed via lexical_cast).
void ExcitationInput::WriteInputData(const StlInputItemVector &vInputItems, const std::string &fileName) const
{
if (vInputItems.empty())
return;
std::ofstream inputFile(filesystem::path(m_targetDir / fileName).string().c_str());
for (unsigned int idx = 0; idx < vInputItems.size(); idx++)
{
double expVal = 0.0;
try
{
Expression *pExp = BaseComponent::GetExpression(vInputItems[idx].m_partName,
vInputItems[idx].m_expName);
std::string strExpType = pExp->Type().GetText();
if (strExpType == "Number")
expVal = pExp->GetValueUsingUnits(Expression::UnitsOptionExpression); //pExp->Value();
else if (strExpType == "Integer")
expVal = pExp->IntegerValue();
// Convert to SI Unit System
if (!vInputItems[idx].m_targetUnitName.empty())
{
Session *pSession = Session::GetSession();
BasePart *pExpPrt = pSession->Parts()->FindObject(vInputItems[idx].m_partName.c_str());
UnitCollection *pUnitCol = pExpPrt->UnitCollection();
Unit *pSiUnit = pUnitCol->FindObject(vInputItems[idx].m_targetUnitName.c_str());
expVal = pUnitCol->Convert(pExp->Units(), pSiUnit, expVal);
}
}
catch (NXException &)
{
// Fallback: the "expression name" is actually a numeric literal.
expVal = lexical_cast<double>(vInputItems[idx].m_expName);
}
inputFile << expVal << ",";
}
// Back up one character to overwrite the trailing comma with a newline.
// NOTE(review): seekp on a text-mode stream is fragile -- confirm the
// solver tolerates the resulting line ending on all platforms.
inputFile.seekp(-1, std::ios_base::cur);
inputFile << "\n";
inputFile.close();
}
// Writes the vehicle parameter file: carriage count plus per-carriage masses,
// geometry, and suspension stiffness/damping, all converted to SI units.
void ExcitationInput::WriteVehicleData() const
{
InputItem inputDataItem[] =
{
{TRAIN_PRT_PART_NAME, TRAIN_CARRIAGE_COUNT_EXP_NAME, ""},
{CARRIAGE_PRT_PART_NAME, TRAIN_CARRIAGE_WEIGHT_EXP_NAME, UF_UNIT_MASS_kg},
{CARRIAGE_PRT_PART_NAME, TRAIN_CARRIAGE_LENGTH_EXP_NAME, UF_UNIT_LENGTH_m},
{CARRIAGE_PRT_PART_NAME, TRAIN_CARRIAGE_DISTANCE_EXP_NAME, UF_UNIT_LENGTH_m},
{CARRIAGE_PRT_PART_NAME, TRAIN_BOGIE_WEIGHT_EXP_NAME, UF_UNIT_MASS_kg},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_WEIGHT_EXP_NAME, UF_UNIT_MASS_kg},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_EXP_NAME, UF_UNIT_PRESSUREONEDGE_N__m},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_EXP_NAME, UF_UNIT_MASSFLOW_kg__sec},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_EXP_NAME, UF_UNIT_PRESSUREONEDGE_N__m},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_EXP_NAME, UF_UNIT_MASSFLOW_kg__sec},
{CARRIAGE_PRT_PART_NAME, TRAIN_HALF_BOGIE_DISTANCE_EXP_NAME, UF_UNIT_LENGTH_m},
{CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_INTERVAL_EXP_NAME, UF_UNIT_LENGTH_m}
};
StlInputItemVector vInputItems(inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
WriteInputData(vInputItems, VEHICLE_INPUT_FILE_NAME);
}
// Writes the rail parameter file: elastic modulus, linear density, section
// inertia, and element size, converted to SI units.
void ExcitationInput::WriteRailData() const
{
InputItem inputDataItem[] =
{
{RAIL_SLAB_FEM_PART_NAME, RAIL_ELASTIC_MODULUS_EXP_NAME, UF_UNIT_PRESSURE_N__m2},
{RAIL_SLAB_FEM_PART_NAME, RAIL_LINEAR_DENSITY_EXP_NAME, UF_UNIT_MASSPERLENGTH_kg__m},
//{RAIL_SLAB_FEM_PART_NAME, "0", UF_UNIT_AREA_m2},
{RAIL_SLAB_FEM_PART_NAME, RAIL_SECTION_INERTIA_EXP_NAME, UF_UNIT_MOMENT_OF_INERTIA_m4},
{RAIL_SLAB_FEM_PART_NAME, RAIL_ELEMENT_SIZE_EXP_NAME, UF_UNIT_LENGTH_m}
};
StlInputItemVector vInputItems(inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
WriteInputData(vInputItems, RAIL_INPUT_FILE_NAME);
}
// Writes the slab parameter file: slab count/geometry, fastener properties,
// mass/stiffness/damping ratios, and section data, converted to SI units.
void ExcitationInput::WriteSlabData() const
{
InputItem inputDataItem[] =
{
{SLABS_PRT_PART_NAME, SLAB_COUNT_EXP_NAME, ""},
{SLAB_PRT_PART_NAME, SLAB_LENGTH_EXP_NAME, UF_UNIT_LENGTH_m},
{RAIL_SLAB_FEM_PART_NAME, RAIL_ELEMENT_SIZE_EXP_NAME, UF_UNIT_LENGTH_m},
{RAIL_SLAB_FEM_PART_NAME, SLAB_MASS_RATIO_EXP_NAME, ""},
{RAIL_SLAB_FEM_PART_NAME, SLAB_FASTENER_STIFFNESS_EXP_NAME, UF_UNIT_PRESSUREONEDGE_N__m},
{RAIL_SLAB_FEM_PART_NAME, SLAB_FASTENER_DAMPING_EXP_NAME, UF_UNIT_MASSFLOW_kg__sec},
{RAIL_SLAB_FEM_PART_NAME, SLAB_STIFFNESS_RATIO_EXP_NAME, ""},
{RAIL_SLAB_FEM_PART_NAME, SLAB_DAMPING_RATIO_EXP_NAME, ""},
{RAIL_SLAB_FEM_PART_NAME, SLAB_ELASTIC_MODULUS_EXP_NAME, UF_UNIT_PRESSURE_N__m2},
{RAIL_SLAB_FEM_PART_NAME, SLAB_SECTION_INERTIA_EXP_NAME, UF_UNIT_MOMENT_OF_INERTIA_m4},
{SLAB_PRT_PART_NAME, SLAB_SUPPORT_COUNT_EXP_NAME, ""}
};
StlInputItemVector vInputItems(inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
WriteInputData(vInputItems, SLAB_INPUT_FILE_NAME);
}
// Writes the supporting-beam parameter file. The parameter source (bridge,
// infinite base, or tunnel part/FEM expressions) is chosen by the project
// type; unknown project types produce an empty item list and thus no file
// content (WriteInputData returns early on empty input).
void ExcitationInput::WriteBeamData() const
{
// Bridge
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
StlInputItemVector vInputItems;
switch (pPrjProp->GetProjectType())
{
case Project::ProjectType_Bridge:
{
InputItem inputDataItem[] =
{
{BRIDGE_FEM_PART_NAME, BRIDGE_ELASTIC_MODULUS_EXP_NAME, UF_UNIT_PRESSURE_N__m2},
{BRIDGE_FEM_PART_NAME, BRIDGE_MASS_DENSITY_EXP_NAME, UF_UNIT_MASSDENSITY_kg__m3},
{BEAM_PRT_PART_NAME, SECTION_AREA_EXP_NAME, UF_UNIT_AREA_m2},
{BRIDGE_FEM_PART_NAME, BRIDGE_SECTION_INERTIA_EXP_NAME, UF_UNIT_MOMENT_OF_INERTIA_m4}/*,
{BEAM_PRT_PART_NAME, WIDTH_EXP_NAME, UF_UNIT_LENGTH_m}*/
};
vInputItems.insert(vInputItems.end(), inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
}
break;
case Project::ProjectType_Selmi_Infinite:
{
InputItem inputDataItem[] =
{
{BASE_FEM_PART_NAME, BASE_ELASTIC_MODULUS_EXP_NAME, UF_UNIT_PRESSURE_N__m2},
{BASE_FEM_PART_NAME, BASE_MASS_DENSITY_EXP_NAME, UF_UNIT_MASSDENSITY_kg__m3},
{BASE_PRT_PART_NAME, SECTION_AREA_EXP_NAME, UF_UNIT_AREA_m2},
{BASE_FEM_PART_NAME, BASE_SECTION_INERTIA_EXP_NAME, UF_UNIT_MOMENT_OF_INERTIA_m4}
};
vInputItems.insert(vInputItems.end(), inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
}
break;
case Project::ProjectType_Tunnel:
{
InputItem inputDataItem[] =
{
{TUNNEL_FEM_PART_NAME, TUNNEL_CONCRETE_ELASTIC_MODULUS_EXP_NAME, UF_UNIT_PRESSURE_N__m2},
{TUNNEL_FEM_PART_NAME, TUNNEL_CONCRETE_MASS_DENSITY_EXP_NAME, UF_UNIT_MASSDENSITY_kg__m3},
{TUNNEL_PRT_PART_NAME, SECTION_AREA_EXP_NAME, UF_UNIT_AREA_m2},
{TUNNEL_FEM_PART_NAME, TUNNEL_SECTION_INERTIA_EXP_NAME, UF_UNIT_MOMENT_OF_INERTIA_m4}
};
vInputItems.insert(vInputItems.end(), inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
}
break;
default:
break;
}
WriteInputData(vInputItems, BEAM_INPUT_FILE_NAME);
}
// Writes the calculation parameter file (train speed and time step) read from
// the sim part's expressions. The third InputItem field is omitted, which
// value-initializes m_targetUnitName to "" (no unit conversion).
void ExcitationInput::WriteCalculationData() const
{
// Get result path name
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
std::string strSimPrt(pPrjProp->GetProjectName().append("_s"));
InputItem inputDataItem[] =
{
{strSimPrt, TRAIN_SPEED_EXP_NAME},
{strSimPrt, COMPUTE_TIME_STEP_EXP_NAME}
};
StlInputItemVector vInputItems(inputDataItem, inputDataItem + N_ELEMENTS(inputDataItem));
WriteInputData(vInputItems, CALCULATION_INPUT_FILE_NAME);
}
// Orders FE nodes by X descending, then by Z ascending (lexicographic).
// Used to sort the noise reference-node sequence (see
// NoiseInput::ConstructRefNodeSequence).
//
// FIX: the original predicate returned (X1 > X2) || (Z1 < Z2), which is not
// a strict weak ordering -- for two nodes with X1 > X2 and Z1 > Z2 BOTH
// comp(a,b) and comp(b,a) were true. Passing such a comparator to std::sort
// is undefined behavior per the C++ Compare requirements. Rewritten as a
// proper lexicographic comparison with the same intended ordering.
class NodePosXZComparer : public std::binary_function<TaggedObject*, TaggedObject*, bool>
{
public:
    NodePosXZComparer()
    {
    }
    ~NodePosXZComparer()
    {
    }
    bool operator () (TaggedObject *pNode1, TaggedObject *pNode2) const
    {
        FENode *pNode = NULL;
        pNode = dynamic_cast<FENode*>(pNode1);
        Point3d absPos1(pNode->Coordinates());
        pNode = dynamic_cast<FENode*>(pNode2);
        Point3d absPos2(pNode->Coordinates());
        // Primary key: X descending; secondary key: Z ascending.
        if (absPos1.X != absPos2.X)
            return absPos1.X > absPos2.X;
        return absPos1.Z < absPos2.Z;
    }
};
// Fills m_refNodeSeq with the entities of the noise-node group, sorted by
// NodePosXZComparer; the resulting index order defines the node numbering
// used in the noise input file names (see GetTargetInputName).
void NoiseInput::ConstructRefNodeSequence()
{
// Get all ref nodes
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
CaeGroup *pGroup = pSimPart->CaeGroups()->FindObject(NODES_FOR_NOISE_GROUP_NAME);
m_refNodeSeq = pGroup->GetEntities();
std::sort(m_refNodeSeq.begin(), m_refNodeSeq.end(), NodePosXZComparer());
}
// Writes the noise solver input: the output-point coordinates followed by the
// FFT frequency data extracted from the response results.
void NoiseInput::Generate() const
{
WriteOutputPoints();
WriteFrequenceData();
}
// Extracts the Y-direction node records from the intermediate AFU result,
// converts each time series to frequency data via FFT, and writes one input
// file per record. The SCOPE_EXIT guard unloads the AFU file afterwards even
// if an exception escapes.
void NoiseInput::WriteFrequenceData() const
{
std::string afuFileName(GetIntermediateResult());
// unload intermediate result
BOOST_SCOPE_EXIT((&afuFileName))
{
try
{
FTK::DataManager *pDataMgr = Session::GetSession()->DataManager();
pDataMgr->UnloadFile(afuFileName.c_str());
}
catch (std::exception &)
{
// Best-effort unload; failure must not mask the original error.
}
}
BOOST_SCOPE_EXIT_END
AfuManager *pAfuMgr = Session::GetSession()->AfuManager();
AfuDataConvertor *pAfuConvert = pAfuMgr->AfuDataConvertor();
std::vector<int> recordIndices(pAfuMgr->GetRecordIndexes(afuFileName.c_str()));
BOOST_FOREACH(int idx, recordIndices)
{
AfuData *pAfuData = NULL;
pAfuMgr->GetAfuData(afuFileName.c_str(), idx, &pAfuData);
std::string recordName(pAfuData->RecordName().GetText());
// Only records ending in "-Node-<label>-Y" (Y-direction velocity) are used.
if(std::tr1::regex_search(recordName, std::tr1::regex("-Node-\\d+-Y$")))
{
std::vector<double> xValues, yValues;
yValues = pAfuData->GetRealData(xValues);
std::vector<double> freqVals, yReals, yImags;
yImags = pAfuConvert->GetFftFrequencyData(xValues, yValues, freqVals, yReals);
WriteRecord(recordName, freqVals, yReals, yImags);
}
}
}
// Writes one frequency record as fixed-width columns of
// "frequency real imaginary", converting amplitudes from mm to m.
void NoiseInput::WriteRecord(const std::string &recordName, const std::vector<double> &freqVals,
const std::vector<double> &yReals, const std::vector<double> &yImags) const
{
std::ofstream inputFile(filesystem::path(m_targetDir / GetTargetInputName(recordName)).string().c_str());
for (unsigned int idx = 0; idx < freqVals.size(); idx++)
{
// write values
inputFile << std::setw(15) << freqVals[idx] << " " <<
std::setw(15) << mmToMConvert * yReals[idx] << " " <<
std::setw(15) << mmToMConvert * yImags[idx] << std::endl;
}
}
// Returns the path of the noise intermediate result, loading it if it already
// exists on disk (load failure is tolerated: it may already be loaded) or
// creating it from the response results otherwise.
std::string NoiseInput::GetIntermediateResult() const
{
NoiseIntermResult noiseIntermResult;
std::string intermResultPathName(noiseIntermResult.GetResultPathName());
if (filesystem::exists(intermResultPathName))
{
// Load it anyway
try
{
FTK::DataManager *pDataMgr = Session::GetSession()->DataManager();
pDataMgr->LoadFile(intermResultPathName.c_str());
}
catch (std::exception &)
{
}
}
else
noiseIntermResult.Create();
return intermResultPathName;
}
// Maps an AFU record name ("...-Node-<label>-...") to the noise input file
// name for that node: the node label is resolved against the rail FE model's
// label map and its 1-based position in the sorted reference-node sequence
// becomes the file index. Throws when the node is no longer in the sequence
// (stale response result).
std::string NoiseInput::GetTargetInputName(const std::string &recordName) const
{
int nodeLabel = 0;
// Get Node Label
std::tr1::regex reg("-Node-(\\d+)-");
std::tr1::smatch what;
// what.size() == 2: full match plus the single capture group.
if(std::tr1::regex_search(recordName, what, reg) && what.size() == 2)
{
nodeLabel = boost::lexical_cast<int>(what[1]);
}
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
SimPart *pSimPart = pPrjProp->GetSimPart();
FEModelOccurrence *pSimFEModel = pSimPart->Simulation()->Femodel();
FEModelOccurrence *pRailFEModelOcc = GetFEModelOccByMeshName(pSimFEModel, RAIL_MESH_NAME);
boost::scoped_ptr<FENodeLabelMap> pRailSlabNodeLabelMap(pRailFEModelOcc->FenodeLabelMap());
FENode *pFENode = pRailSlabNodeLabelMap->GetNode(nodeLabel);
std::vector<TaggedObject*>::const_iterator iter = std::find(m_refNodeSeq.begin(), m_refNodeSeq.end(), pFENode);
if (iter == m_refNodeSeq.end())
throw NXException::Create("The response result is out of date. Please solve response first.");
int nodeIndex = static_cast<int>(iter - m_refNodeSeq.begin() + 1);
return (boost::format(NOISE_FREQUENCE_INPUT_FILE_NAME) % nodeIndex).str();
}
// Writes the noise output points as coordinates relative to the slab center,
// in meters (fixed-width columns). Each point's Z is first folded into the
// first slab period via fmod; this assumes the slab length equals twice the
// center point's Z coordinate -- a presumption worth confirming against the
// datum-point construction.
void NoiseInput::WriteOutputPoints() const
{
Point *pCenterPt = GetSlabCenter();
if (!pCenterPt)
throw NXException::Create("The slab center point has been deleted.");
Point3d centerCoord(pCenterPt->Coordinates());
std::ofstream inputFile(filesystem::path(m_targetDir / NOISE_COORDINATE_INPUT_FILE_NAME).string().c_str());
BOOST_FOREACH(Point *pOutputPt, m_outputPoints)
{
// convert the coordinate to first slab
Point3d coord(pOutputPt->Coordinates());
coord.Z = std::fmod(coord.Z, centerCoord.Z * 2);
// write relative coordinate
inputFile << std::setw(15) << mmToMConvert * (coord.X - centerCoord.X) << " " <<
std::setw(15) << mmToMConvert * (coord.Y - centerCoord.Y) << " " <<
std::setw(15) << mmToMConvert * (coord.Z - centerCoord.Z) << std::endl;
}
}
// Returns the slab center datum point (may be NULL if it was deleted; the
// caller checks).
Point* NoiseInput::GetSlabCenter() const
{
NoiseDatumPointsUpdater datumPtsUpdater;
return datumPtsUpdater.GetSlabCenter();
}
// Captures which result quantities to request (element stress, nodal
// displacement/acceleration, noise velocity) together with the entities they
// apply to. Apply() pushes these into the NX solution.
SolveSettings::SolveSettings(bool bOutputElems, const std::vector<TaggedObject*> &outputElems,
bool bOutputNodes, const std::vector<TaggedObject*> &outputNodes,
bool bOutputNodesForNoise) : m_bOutputElems(bOutputElems), m_outputElems(outputElems),
m_bOutputNodes(bOutputNodes), m_outputNodes(outputNodes), m_bOutputNodesForNoise(bOutputNodesForNoise)
{
}
// Applies all solve settings to the active solution: foreground execution,
// response/noise output requests, time-step count, and a constraint check.
void SolveSettings::Apply()
{
SetRunJobInForeground();
SetResponseOutput();
SetNoiseOutput();
SetTimeStep();
CheckConstraints();
}
// Replaces the members of the named CAE group with the given entities.
void SolveSettings::SetEntityGroup(const std::string &groupName,
const std::vector<TaggedObject*> &outputEntities)
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
CaeGroup *pGroup = pSimPart->CaeGroups()->FindObject(groupName);
pGroup->SetEntities(outputEntities);
}
// Forces the VSDANE solution to run in the foreground and points the solver
// scratch directory at the project path.
void SolveSettings::SetRunJobInForeground()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
std::string strSol((boost::format(FIND_SOLUTION_PATTERN_NAME) % VSDANE_SOLUTION_NAME).str());
SimSimulation *pSim = pSimPart->Simulation();
SimSolution * pSolution(dynamic_cast<SimSolution*>(pSim->FindObject(strSol)));
PropertyTable *pPropTab = pSolution->PropertyTable();
pPropTab->SetBooleanPropertyValue("Foreground", true);
// set scratch dir
pPropTab = pSolution->SolverOptionsPropertyTable();
pPropTab->SetStringPropertyValue("sdirectory", pPrjProp->GetProjectPath());
}
// Updates the response output groups (when requested) and enables/disables
// the corresponding structural output requests: acceleration and displacement
// follow the node flag, stress follows the element flag.
void SolveSettings::SetResponseOutput()
{
if (m_bOutputElems)
SetEntityGroup(ELEMENT_FOR_RESPONSE_GROUP_NAME, m_outputElems);
if (m_bOutputNodes)
SetEntityGroup(NODE_FOR_RESPONSE_GROUP_NAME, m_outputNodes);
std::vector<OutputRequestItem> outputReqItems;
outputReqItems.reserve(3);
outputReqItems.push_back(OutputRequestItem("Acceleration - Enable", m_bOutputNodes));
outputReqItems.push_back(OutputRequestItem("Displacement - Enable", m_bOutputNodes));
outputReqItems.push_back(OutputRequestItem("Stress - Enable", m_bOutputElems));
OpenOutputRequests(RESPONSE_STRUCTURAL_OUTPUT_OBJECT_NAME, outputReqItems);
}
// Refreshes the simulation constraints (delegates to ConstraintUpdater).
void SolveSettings::CheckConstraints()
{
ConstraintUpdater constraintUpdater;
constraintUpdater.Update();
}
// Refreshes the noise datum points when noise output is requested, then sets
// the velocity output request accordingly (explicitly disabled otherwise).
void SolveSettings::SetNoiseOutput()
{
// Check Noise Datum Points
if (m_bOutputNodesForNoise)
{
NoiseDatumPointsUpdater datumPtsUpdater;
datumPtsUpdater.Update();
}
std::vector<OutputRequestItem> outputReqItems;
outputReqItems.push_back(OutputRequestItem("Velocity - Enable", m_bOutputNodesForNoise));
OpenOutputRequests(NOISE_STRUCTURAL_OUTPUT_OBJECT_NAME, outputReqItems);
}
void SolveSettings::OpenOutputRequests(const std::string &oObjName, const std::vector<OutputRequestItem> &outputReqItems)
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
std::string strModelingObj((boost::format(FIND_MODELING_OBJ_PATTERN_NAME) % oObjName.c_str()).str());
ModelingObjectPropertyTable *pModelingObjPT(pSimPart->ModelingObjectPropertyTables()->FindObject(strModelingObj));
PropertyTable *pPropTab = pModelingObjPT->PropertyTable();
BOOST_FOREACH(OutputRequestItem oReqItem, outputReqItems)
{
pPropTab->SetBooleanPropertyValue(oReqItem.get<0>().c_str(), oReqItem.get<1>());
}
}
// Copies the "number of time steps" expression value into the time-step
// modeling object; numeric_cast throws if the expression value does not fit
// in an int.
void SolveSettings::SetTimeStep()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::SimPart *pSimPart = pPrjProp->GetSimPart();
std::string strModelingObj((boost::format(FIND_MODELING_OBJ_PATTERN_NAME) % TIME_STEP_OUTPUT_OBJECT_NAME).str());
ModelingObjectPropertyTable *pModelingObjPT(pSimPart->ModelingObjectPropertyTables()->FindObject(strModelingObj));
PropertyTable *pPropTab = pModelingObjPT->PropertyTable();
Expression *pExp = pSimPart->Expressions()->FindObject(NUM_OF_TIME_STEPS_EXP_NAME);
pPropTab->SetIntegerPropertyValue("Number of Time Steps", numeric_cast<int>(pExp->Value()));
}
}
//#pragma warning(pop)
<file_sep>/src/NXVsar/src/vsarint/VsarUI_SlabSettings.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\CAE\Response\VSAR-DEV\NXProject\AppRoot\application\SlabSettings.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: Joseph
// Version: NX 7.5
// Date: 05-08-2011 (Format: mm-dd-yyyy)
// Time: 14:00 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_SlabSettings.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <Vsar_Component.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Slab.hxx>
#include <Vsar_Names.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//static CompAttrInfo attrExpInfo[] =
//{
// {SLAB_SUPPORT_COUNT_ID_NAME, SLAB_PRT_PART_NAME, SLAB_SUPPORT_COUNT_EXP_NAME},
// {FASTENER_STIFFNESS_ID_NAME, RAIL_SLAB_FEM_PART_NAME, RAIL_FASTENER_STIFFNESS_EXP_NAME},
// {FASTENER_DAMPING_ID_NAME, RAIL_SLAB_FEM_PART_NAME, RAIL_FASTENER_DAMPING_EXP_NAME},
// {MASS_RATIO_ID_NAME, RAIL_SLAB_FEM_PART_NAME, SLAB_MASS_RATIO_EXP_NAME},
// {STIFFNESS_RATIO_ID_NAME, RAIL_SLAB_FEM_PART_NAME, SLAB_STIFFNESS_RATIO_EXP_NAME},
// {DAMPING_RATIO_ID_NAME, RAIL_SLAB_FEM_PART_NAME, SLAB_DAMPING_RATIO_EXP_NAME},
// {ELASTIC_MODULUS_ID_NAME, RAIL_SLAB_FEM_PART_NAME, SLAB_ELASTIC_MODULUS_EXP_NAME},
// {POISSON_RATIO_ID_NAME, RAIL_SLAB_FEM_PART_NAME, SLAB_POISSON_RATIO_EXP_NAME},
//};
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
SlabSettings::SlabSettings() : BaseCompDialog("SlabSettings.dlx",
new Slab)
{
    // BaseCompDialog receives the dialog layout file and the Slab component
    // whose expressions this dialog edits; no further setup is required.
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
SlabSettings::~SlabSettings()
{
    // Nothing to release; base classes clean up the dialog resources.
}
void SlabSettings::ShowDialog()
{
boost::scoped_ptr<SlabSettings> pRailSettingDlg(new SlabSettings());
try
{
// The following method shows the dialog immediately
pRailSettingDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Initialize callback: let the base class bind expressions to blocks, then
// resolve the dialog controls declared in SlabSettings.dlx into members.
//------------------------------------------------------------------------------
void SlabSettings::InitializeCb()
{
    BaseCompDialog::InitializeCb();
    try
    {
        CompositeBlock *pTopBlock = m_theDialog->TopBlock();
        //grpGeometry = pTopBlock->FindBlock("grpGeometry");
        m_length = pTopBlock->FindBlock("length");
        //m_eleSize = pTopBlock->FindBlock("eleSize");
        //counts = pTopBlock->FindBlock("counts");
        m_supportCnt = pTopBlock->FindBlock(SLAB_SUPPORT_COUNT_ID_NAME);
        //grpMaterial = pTopBlock->FindBlock("grpMaterial");
        // Material / connection parameters, looked up by their shared id
        // constants so the dialog stays in sync with the attribute table.
        m_fastenerStiffness = pTopBlock->FindBlock(FASTENER_STIFFNESS_ID_NAME);
        m_fastenerDamping = pTopBlock->FindBlock(FASTENER_DAMPING_ID_NAME);
        m_massRatio = pTopBlock->FindBlock(MASS_RATIO_ID_NAME);
        m_stiffnessRatio = pTopBlock->FindBlock(STIFFNESS_RATIO_ID_NAME);
        m_dampingRatio = pTopBlock->FindBlock(DAMPING_RATIO_ID_NAME);
        m_elasticModulus = pTopBlock->FindBlock(ELASTIC_MODULUS_ID_NAME);
        m_poissonRatio = pTopBlock->FindBlock(POISSON_RATIO_ID_NAME);
    }
    catch(std::exception& ex)
    {
        // Report the failure; the dialog still opens with unresolved blocks.
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Dialog-shown callback (generated stub): runs just before the dialog is
// displayed; currently no pre-display adjustments are needed.
//------------------------------------------------------------------------------
void SlabSettings::DialogShownCb()
{
    try
    {
        //---- Enter your callback code here -----
    }
    catch(std::exception& ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
//int SlabSettings::ApplyCb()
//{
// int errorCode = 0;
// try
// {
// //---- Enter your callback code here -----
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// errorCode = 1;
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return errorCode;
//}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Update callback (generated scaffolding): invoked whenever a dialog block
// changes. All branches are currently empty placeholders — per-control
// reactions go in the matching branch. Always returns 0.
//------------------------------------------------------------------------------
int SlabSettings::UpdateCb(UIBlock* block)
{
    try
    {
        if(block == m_length)
        {
            //---------Enter your code here-----------
        }
        //else if(block == m_eleSize)
        //{
        //    //---------Enter your code here-----------
        //}
        //else if(block == counts)
        //{
        //    //---------Enter your code here-----------
        //}
        else if(block == m_supportCnt)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_fastenerStiffness)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_fastenerDamping)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_massRatio)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_stiffnessRatio)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_dampingRatio)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_elasticModulus)
        {
            //---------Enter your code here-----------
        }
        else if(block == m_poissonRatio)
        {
            //---------Enter your code here-----------
        }
    }
    catch(std::exception& ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
}
<file_sep>/src/NXVsar/src/vsarint/VsarUI_BaseCompDialog.cxx
#include <uf_defs.h>
#include <VsarUI_BaseCompDialog.hxx>
#include <boost/lexical_cast.hpp>
#include <boost/bind.hpp>
//#include <NXOpen/Session.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
//#include <NXOpen/Callback.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/Expression.hxx>
#include <NXOpen/BlockStyler_PropertyList.hxx>
#include <Vsar_Component.hxx>
#include <Vsar_Utils.hxx>
using namespace boost;
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Constructor: forward the dialog layout file name to the base dialog and
// store the component whose expressions this dialog edits.
//------------------------------------------------------------------------------
BaseCompDialog::BaseCompDialog(const std::string &dialogName, Vsar::BaseComponent *pComp)
    : BaseDialog(dialogName), m_pComp(pComp)
{
    // All work happens in the member-initializer list; the generated
    // try/catch-and-rethrow around an empty body was removed as a no-op.
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
BaseCompDialog::~BaseCompDialog()
{
    // NOTE(review): m_pComp is a raw pointer and is not deleted here —
    // presumably owned/released elsewhere; confirm before changing.
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Initialize callback: load the component's geometry, seed every dialog
// block from its backing expression, then let the component run its own
// initialization. On an NX error the user is informed and the exception is
// re-thrown so the dialog does not open on a corrupted model.
//------------------------------------------------------------------------------
void BaseCompDialog::InitializeCb()
{
    try
    {
        m_pComp->LoadGeometryPart();
        HandleExpressions(boost::bind(&BaseCompDialog::InitBlock, this, _1, _2));
        m_pComp->OnInit();
    }
    catch(NXException&)
    {
        // Fix: message-box title was misspelled "Initilize".
        s_theUI->NXMessageBox()->Show("Initialize", NXMessageBox::DialogTypeError,
            "The model is corrupted. Maybe the part has been modified by hand. \n"
            "Please recreate a new project to continue.");
        throw;
    }
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
//void BaseCompDialog::DialogShownCb()
//{
// try
// {
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
//}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Apply callback: write the dialog values back into the component's
// expressions and update the model under a visible undo mark.
// Returns 0 on success, 1 on failure (after rolling the session back to the
// last visible undo mark).
//------------------------------------------------------------------------------
int BaseCompDialog::ApplyCb()
{
    int errorCode = 0;
    Session::UndoMarkId undoMark;   // set below so a failed apply can be undone
    try
    {
        if (m_pComp)
        {
            undoMark = s_theSession->SetUndoMark(Session::MarkVisibilityVisible, "Setting Vsar Component");
            // Push every (block, expression) pair through WriteExpression.
            HandleExpressions(boost::bind(&BaseCompDialog::WriteExpression, this, _1, _2));
            m_pComp->UpdateModel();
        }
    }
    catch(std::exception&)
    {
        errorCode = 1;
        s_theUI->NXMessageBox()->Show("Update Model", NXMessageBox::DialogTypeError,
            "Failed to update model. The input parameter is incorrect or the model is corrupted.\n"
            " Maybe the part has been modified by hand. \n");
        // Roll back everything done since the mark set above.
        s_theSession->UndoToLastVisibleMark();
    }
    return errorCode;
}
template <typename Handler>
void BaseCompDialog::HandleExpressions(Handler hander)
{
if (m_pComp)
{
const StlCompAttrInfoVector &attrInfos(m_pComp->GetAttrInfo());
StlCompAttrInfoVector::const_iterator iter;
Expression *pExpression = NULL;
CompositeBlock *pTopBlock = m_theDialog->TopBlock();
UIBlock *pBlock = NULL;
for (iter = attrInfos.begin(); iter != attrInfos.end(); ++iter)
{
pExpression = m_pComp->GetExpression(iter->m_partName, iter->m_expName);
pBlock = pTopBlock->FindBlock(iter->m_attrName.c_str());
//(this->*hander)(pBlock, pExpression);
hander(pBlock, pExpression);
}
}
}
//------------------------------------------------------------------------------
// Seed one dialog block from its backing expression, dispatching on the
// block's type name. Unknown block types (and a NULL block) are ignored.
//------------------------------------------------------------------------------
void BaseCompDialog::InitBlock( UIBlock * pBlock, Expression * pExpression )
{
    if (!pBlock)
        return;

    boost::scoped_ptr<PropertyList> pProps(pBlock->GetProperties());
    const std::string typeName(pBlock->Type().GetUTF8Text());

    if (typeName == "Expression")
    {
        // Expression blocks are bound directly to the expression object.
        pProps->SetTaggedObject("ExpressionObject", pExpression);
    }
    else if (typeName == "Enumeration")
    {
        // Enumeration blocks take the integer value in its string form.
        pProps->SetEnumAsString("Value",
            boost::lexical_cast<std::string>(pExpression->IntegerValue()).c_str());
    }
    else if (typeName == "Integer")
    {
        pProps->SetInteger("Value", pExpression->IntegerValue());
    }
}
//------------------------------------------------------------------------------
// Push one dialog block's current value back into its backing expression.
// Only enumeration and integer blocks need an explicit write; other block
// types (and a NULL block) are ignored.
//------------------------------------------------------------------------------
void BaseCompDialog::WriteExpression( UIBlock * pBlock, Expression * pExpression )
{
    if (!pBlock)
        return;

    boost::scoped_ptr<PropertyList> pProps(pBlock->GetProperties());
    const std::string typeName(pBlock->Type().GetUTF8Text());

    if (typeName == "Enumeration")
    {
        // The enum's string form becomes the expression's right-hand side.
        pExpression->SetRightHandSide(pProps->GetEnumAsString("Value").GetUTF8Text());
    }
    else if (typeName == "Integer")
    {
        pExpression->SetValue(pProps->GetInteger("Value"));
    }
}
//int BaseCompDialog::CancelCb()
//{
// int errorCode = 0;
// try
// {
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// errorCode = 1;
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return errorCode;
//}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
//int BaseCompDialog::UpdateCb(UIBlock* block)
//{
// try
// {
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return 0;
//}
}
<file_sep>/src/NXVsar/src/vsar/Vsar_Component.cxx
#include <uf_defs.h>
#include <Vsar_Component.hxx>
#include <algorithm>
#include <boost/scope_exit.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/shared_array.hpp>
#include <boost/bind.hpp>
#include <boost/lambda/lambda.hpp>
#include <boost/lambda/bind.hpp>
#include <boost/cast.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/foreach.hpp>
#include <NXOpen/Session.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/LogFile.hxx>
#include <NXOpen/PartCollection.hxx>
#include <NXOpen/Part.hxx>
#include <NXOpen/Expression.hxx>
#include <NXOpen/ExpressionCollection.hxx>
#include <NXOpen/Body.hxx>
#include <NXOpen/Point.hxx>
#include <NXOpen/Assemblies_ComponentAssembly.hxx>
#include <NXOpen/Assemblies_Component.hxx>
#include <NXOpen/ExpressionCollection.hxx>
#include <NXOpen/CAE_SimPart.hxx>
#include <NXOpen/CAE_FemPart.hxx>
#include <NXOpen/CAE_AssyFemPart.hxx>
#include <NXOpen/CAE_BaseFEModel.hxx>
#include <NXOpen/CAE_AssyFEModel.hxx>
#include <NXOpen/CAE_MeshManager.hxx>
#include <NXOpen/CAE_MeshCollector.hxx>
#include <NXOpen/CAE_CAEBody.hxx>
#include <NXOpen/CAE_CAEFace.hxx>
#include <NXOpen/CAE_FEModelOccurrence.hxx>
#include <NXOpen/CAE_FEElementLabelMap.hxx>
#include <NXOpen/CAE_FENodeLabelMap.hxx>
#include <NXOpen/CAE_Mesh.hxx>
#include <uf.h>
#include <uf_sf.h>
#include <Vsar_Project.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Utils.hxx>
using namespace boost;
using namespace NXOpen;
using namespace NXOpen::Assemblies;
using namespace NXOpen::CAE;
//using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
//------------------------------------------------------------------------------
// Constructor: copy the component's attribute/expression table and make the
// root sim part the display part.
//------------------------------------------------------------------------------
BaseComponent::BaseComponent(const CompAttrInfo *pCompAttrs, int compAttrCnt) :
    m_compAttrs(pCompAttrs, pCompAttrs + compAttrCnt)
{
    // set root sim part as display part
    Project *pPrj = Project::Instance();
    BaseProjectProperty *pPrjProp = pPrj->GetProperty();

    PartLoadStatus *pPrtLoadStatus = NULL;
    Session::GetSession()->Parts()->SetDisplay(pPrjProp->GetSimPart(),
        false, true, &pPrtLoadStatus);
    // Fix: SetDisplay allocates the load-status object for the caller;
    // release it afterwards (the original deleted the still-NULL pointer
    // before the call, leaking the returned object).
    DELETE_CLASS_POINTER(pPrtLoadStatus);
}
BaseComponent::~BaseComponent()
{
    // No resources owned directly; m_compAttrs cleans itself up.
}
//------------------------------------------------------------------------------
// Ensure the parts this component works on are available in the session.
// When *onDemandLoad* is true the (expensive) full geometry load is only
// performed if the component actually depends on template geometry.
//------------------------------------------------------------------------------
void BaseComponent::LoadGeometryPart(bool onDemandLoad)
{
    Project *pPrj = Project::Instance();
    BaseProjectProperty *pPrjProp = pPrj->GetProperty();

    // Fix: the results of these accessors were stored in unused locals;
    // the calls are kept because resolving the parts through the project
    // property brings them into the session (side effect — confirm).
    pPrjProp->GetAFemPart();
    pPrjProp->GetRailSlabFemPart();
    pPrjProp->GetBraceFemPart();

    bool loadAllGeometry = onDemandLoad ? HasGeometryDependency() : true;

    // load idea part and geometry part
    if (loadAllGeometry)
    {
        Part *pRailSab_i = pPrjProp->GetRailSlabIdeaPart();
        pRailSab_i->LoadFully();

        Part *pBrace_i = pPrjProp->GetBraceIdeaPart();
        pBrace_i->LoadFully();

        Part *pRootGeo = pPrjProp->GetGeometryPart();
        pRootGeo->LoadFully();
    }
}
//------------------------------------------------------------------------------
// Resolve a dialog attribute id to its backing expression via the
// component's attribute table. Returns NULL when the id is unknown.
//------------------------------------------------------------------------------
Expression* BaseComponent::GetExpression(const std::string &attrName)
{
    for (std::size_t idx = 0; idx < m_compAttrs.size(); ++idx)
    {
        const CompAttrInfo &info = m_compAttrs[idx];
        if (info.m_attrName == attrName)
            return GetExpression(info.m_partName, info.m_expName);
    }
    return NULL;
}
//------------------------------------------------------------------------------
// Find the named expression inside the named part of the current session.
// Returns NULL when the part is not loaded in the session.
//------------------------------------------------------------------------------
Expression* BaseComponent::GetExpression(const std::string &partName, const std::string &expName)
{
    BasePart *pPart = Session::GetSession()->Parts()->FindObject(partName.c_str());
    if (!pPart)
        return NULL;

    return pPart->Expressions()->FindObject(expName.c_str());
}
bool BaseComponent::HasGeometryDependency() const
{
Project *pPrj = Project::Instance();
BaseProjectProperty *pPrjProp = pPrj->GetProperty();
// Load all the geometry parts
std::vector<std::string> fileNames;
bool needUpdate = false;
pPrjProp->GetTemplatePartFiles(fileNames);
for (StlCompAttrInfoVector::const_iterator iter = m_compAttrs.begin();
iter != m_compAttrs.end(); ++iter)
{
if (std::find(fileNames.begin(), fileNames.end(),
iter->m_partName + ".prt") != fileNames.end())
{
needUpdate = true;
break;
}
}
return needUpdate;
}
//------------------------------------------------------------------------------
// Rebuild the geometry and the FE models when the component touches
// template geometry; otherwise do nothing. The sim part is restored as the
// display part on every exit path (normal return or exception).
//------------------------------------------------------------------------------
void BaseComponent::UpdateModel()
{
    if (HasGeometryDependency())
    {
        Project *pPrj = Project::Instance();
        BaseProjectProperty *pPrjProp = pPrj->GetProperty();
        PartLoadStatus *pPrtLoadStatus = NULL;

        BOOST_SCOPE_EXIT((&pPrjProp)(&pPrtLoadStatus))
        {
            Session::GetSession()->Parts()->SetDisplay(pPrjProp->GetSimPart(),
                false, true, &pPrtLoadStatus);
            // Fix: SetDisplay allocates the load-status object for the
            // caller; release it here instead of leaking it (the original
            // only deleted the pointer while it was still NULL).
            DELETE_CLASS_POINTER(pPrtLoadStatus);
        }
        BOOST_SCOPE_EXIT_END

        UpdateGeometryModel();
        UpdateFEModel();
    }
}
//------------------------------------------------------------------------------
// Fully load every template geometry part present in the session, then run
// a complete NX update (feature, interpart, assembly constraints) under an
// invisible undo mark that is dropped afterwards.
//------------------------------------------------------------------------------
void BaseComponent::UpdateGeometryModel()
{
    Project *pPrj = Project::Instance();
    BaseProjectProperty *pPrjProp = pPrj->GetProperty();
    //PartLoadStatus *pPrtLoadStatus = NULL;
    //DELETE_CLASS_POINTER(pPrtLoadStatus);
    PartCollection *pPrtCol = Session::GetSession()->Parts();
    //pPrtCol->SetDisplay(pPrjProp->GetGeometryPart(),
    //    false, true, &pPrtLoadStatus);

    // Load all the geometry parts
    std::vector<std::string> fileNames;
    pPrjProp->GetTemplatePartFiles(fileNames);
    for (unsigned int idx = 0; idx < fileNames.size(); idx++)
    {
        BasePart *pPrt = pPrtCol->FindObject(fileNames[idx].c_str());
        if (pPrt)   // parts not open in the session are simply skipped
            pPrt->LoadFully();
    }

    Session *pSession = Session::GetSession();
    Session::UndoMarkId undoMarkId;
    NXString undoMarkName("Update Geometry Model");
    undoMarkId = pSession->SetUndoMark(Session::MarkVisibilityInvisible, undoMarkName);
    Update *pUpdateMgr = pSession->UpdateManager();
    // Three update passes against the same mark, then delete the mark so
    // the refresh does not appear as a user-visible undo step.
    pUpdateMgr->DoUpdate(undoMarkId);
    pUpdateMgr->DoInterpartUpdate(undoMarkId);
    pUpdateMgr->DoAssemblyConstraintsUpdate(undoMarkId);
    pSession->DeleteUndoMark(undoMarkId, undoMarkName);
}
void BaseComponent::UpdateFEModel()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::FemPart *pRailSlabFem = pPrjProp->GetRailSlabFemPart();
bool needMerge = false;
if (CanUpdateRailSlabFEModel() || CanUpdateRailSlabConnection())
{
UpdateFECompModel(pRailSlabFem, boost::bind(&BaseComponent::UpdateRailSlabModel, this));
needMerge = true;
}
CAE::FemPart *pBraceFem = pPrjProp->GetBraceFemPart();
if (CanUpdateBraseFEModel() || CanUpdateBraseConnection())
{
UpdateFECompModel(pBraceFem, boost::bind(&BaseComponent::UpdateBraseModel, this));
needMerge = true;
}
CAE::BaseFemPart *pAssemFem = pPrjProp->GetAFemPart();
if (needMerge)
UpdateFECompModel(pAssemFem, boost::bind(&BaseComponent::UpdateAssembleModel, this));
}
//------------------------------------------------------------------------------
// Make *pFem* the display part, sync its FE model, run *updateCb* to apply
// the component-specific changes, then sync again. On failure the error is
// logged, the session rolls back to the last visible undo mark, and the
// exception is re-thrown.
//------------------------------------------------------------------------------
template <typename UpdateCallback>
void BaseComponent::UpdateFECompModel(CAE::BaseFemPart *pFem, UpdateCallback updateCb)
{
    PartLoadStatus *pPrtLoadStatus = NULL;
    DELETE_CLASS_POINTER(pPrtLoadStatus);   // no-op: the pointer is still NULL here
    Session *pSession = Session::GetSession();
    // NOTE(review): the status object SetDisplay returns is never released
    // afterwards — looks like a small leak; confirm ownership.
    pSession->Parts()->SetDisplay(pFem, false, true, &pPrtLoadStatus);

    Session::UndoMarkId undoMark;
    try
    {
        undoMark = pSession->SetUndoMark(Session::MarkVisibilityVisible, "Setting Vsar Component");
        pFem->BaseFEModel()->UpdateFemodel();   // sync before the edit
        updateCb();
        pFem->BaseFEModel()->UpdateFemodel();   // sync the edit's result
    }
    catch (std::exception &ex)
    {
        pSession->LogFile()->WriteLine(ex.what());
        pSession->UndoToLastVisibleMark();
        throw;
    }
    //pSession->DeleteUndoMark(undoMark, NULL);
}
//------------------------------------------------------------------------------
// Rebuild the slab portion of the rail/slab FEM: re-select the geometry,
// re-mesh the slab bodies, refresh the rail-slab 1D connections, then merge
// duplicate nodes when anything changed.
//------------------------------------------------------------------------------
void BaseComponent::UpdateRailSlabModel()
{
    if (CanUpdateRailSlabFEModel())
    {
        BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
        CAE::FemPart *pRailSlabFem = pPrjProp->GetRailSlabFemPart();
        std::vector<Body*> bodyOccs;
        std::vector<Body*> tmpBodyOccs;
        // Carriage Settings
        //tmpBodyOccs = GetGeoModelOccs(pRailSlabFem, CARRIAGE_PART_NAME, CARRIAGE_BODY_NAME);
        //bodyOccs.insert(bodyOccs.end(), tmpBodyOccs.begin(), tmpBodyOccs.end());
        // Slab Settings
        tmpBodyOccs = GetGeoModelOccs(pRailSlabFem, SLAB_PART_NAME, SLAB_BODY_NAME);
        bodyOccs.insert(bodyOccs.end(), tmpBodyOccs.begin(), tmpBodyOccs.end());
        // Re-select bodies in the FEM part, synchronizing lines as well.
        SetFeGeometryData(pRailSlabFem, bodyOccs, true);
        // delete slab meshes first in case of update error.
        DeleteMeshesInCollector(pRailSlabFem->BaseFEModel(), SLAB_MESH_COLLECTOR_NAME);
        UpdateSweptMesh(pRailSlabFem->BaseFEModel(), GetCaeBodies(tmpBodyOccs),
            SLAB_MESH_COLLECTOR_NAME, SLAB_MESH_NAME,
            SLAB_ELEMENT_SIZE_NAME);
    }
    if (CanUpdateRailSlabConnection())
        UpdateRailSlabConnection();
    if (CanUpdateRailSlabFEModel() || CanUpdateRailSlabConnection())
    {
        // NOTE(review): empty list — presumably the UF call then checks
        // the whole model for duplicates; confirm.
        std::vector<Mesh*> meshToMergeNodes;
        MergeDuplicateNodes(meshToMergeNodes);
    }
}
//------------------------------------------------------------------------------
// Rebuild the brace ("base") FEM: for bridge projects re-select and re-mesh
// the beam bodies; refresh the slab-base connections; then merge duplicate
// nodes (restricted to specific meshes for tunnel projects).
//------------------------------------------------------------------------------
void BaseComponent::UpdateBraseModel()
{
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
    if (CanUpdateBraseFEModel())
    {
        // Only bridge projects carry a swept beam mesh to rebuild here.
        if (pPrjProp->GetProjectType() == Project::ProjectType_Bridge)
        {
            CAE::FemPart *pBaseFem = pPrjProp->GetBraceFemPart();
            std::vector<Body*> bodyOccs;
            bodyOccs = GetGeoModelOccs(pBaseFem, BRIDGE_BEAM_PART_NAME, BRIDGE_BEAM_BODY_NAME);
            SetFeGeometryData(pBaseFem, bodyOccs, false);
            UpdateSweptMesh(pBaseFem->BaseFEModel(), GetCaeBodies(bodyOccs),
                BRIDGE_MESH_COLLECTOR_NAME, BRIDGE_MESH_NAME,
                BRIDGE_ELEMENT_SIZE_NAME);
        }
    }
    if (CanUpdateBraseConnection())
        UpdateBaseSlabConnection();
    if (CanUpdateBraseFEModel() || CanUpdateBraseConnection())
    {
        std::vector<Mesh*> meshToMergeNodes;
        // Tunnel projects restrict the node merge to the tunnel concrete
        // mesh and the slab-base connection mesh.
        if (pPrjProp->GetProjectType() == Project::ProjectType_Tunnel)
        {
            CAE::BaseFEModel *pBaseFeModel = pPrjProp->GetBraceFemPart()->BaseFEModel();
            meshToMergeNodes.push_back(GetMeshByName(pBaseFeModel,
                FIND_MESH_PATTERN_NAME, TUNNEL_CONCRETE_MESH_NAME));
            meshToMergeNodes.push_back(GetMeshByName(pBaseFeModel,
                FIND_MESH_PATTERN_NAME, SLAB_BASE_CONNECTION_MESH_NAME));
        }
        MergeDuplicateNodes(meshToMergeNodes);
    }
}
//------------------------------------------------------------------------------
// Collect, in the FEM part's idealized assembly, the occurrences of the
// named body coming from the named component part. Returns the body
// occurrences found (possibly empty).
//------------------------------------------------------------------------------
std::vector<Body*> BaseComponent::GetGeoModelOccs(FemPart *pFemPart, const std::string &bodyPrtName, const std::string &bodyName)
{
    std::vector<Body*> bodyOccs;
    if (pFemPart)
    {
        PartCollection *pPrtCol = Session::GetSession()->Parts();
        //Part* pRootGeo = Project::Instance()->GetProperty()->GetGeometryPart();
        Part* pRootGeo = pFemPart->IdealizedPart();
        // get body part prototype
        Part *pBodyPrt = polymorphic_cast<Part*>(pPrtCol->FindObject(bodyPrtName.c_str()));
        // get body part occurrence
        Component *pRootComp = pRootGeo->ComponentAssembly()->RootComponent();
        std::vector<Assemblies::Component*> prtOccs(GetOccInCompTree(pRootComp, pBodyPrt));
        std::vector<Body*> bodyProtos(GetBodyByName(pBodyPrt, bodyName));
        // Cross every component occurrence with every prototype body and
        // keep only the occurrences that resolve to a Body.
        for (unsigned int idx = 0; idx < prtOccs.size(); idx++)
        {
            for (unsigned int jdx = 0; jdx < bodyProtos.size(); jdx++)
            {
                Body *pBodyOcc = dynamic_cast<Body*>(prtOccs[idx]->FindOccurrence(bodyProtos[jdx]));
                if (pBodyOcc)
                    bodyOccs.push_back(pBodyOcc);
            }
        }
    }
    return bodyOccs;
}
void BaseComponent::UpdateRailSlabConnection()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::FemPart *pFemPart = pPrjProp->GetRailSlabFemPart();
std::vector<TaggedObject*> railConnectPts;
std::vector<TaggedObject*> slabConnectPts;
railConnectPts = GetPointByLayer(pFemPart, RAIL_CONNECTION_POINT_LAYER);
slabConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_RAIL_POINT_LAYER);
Update1DConnection(pFemPart->BaseFEModel(), railConnectPts, slabConnectPts,
RAIL_SLAB_CONNECTION_NAME, RAIL_SLAB_CONNECTION_COLLECTOR_NAME, RAIL_SLAB_CONNECTION_MESH_NAME);
}
void BaseComponent::UpdateBaseSlabConnection()
{
BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
CAE::FemPart *pFemPart = pPrjProp->GetBraceFemPart();
std::vector<TaggedObject*> slabConnectPts;
std::vector<TaggedObject*> baseConnectPts;
std::vector<TaggedObject*> slabPartConnectPts;
std::vector<TaggedObject*> basePartConnectPts;
// get left side points
slabPartConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_BASE_LEFT_POINT_LAYER);
basePartConnectPts = GetPointByLayer(pFemPart, BASE_CONNECT_TO_SLAB_LEFT_POINT_LAYER);
slabConnectPts.insert(slabConnectPts.end(), slabPartConnectPts.begin(), slabPartConnectPts.end());
baseConnectPts.insert(baseConnectPts.end(), basePartConnectPts.begin(), basePartConnectPts.end());
// get right side points
slabPartConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_BASE_RIGHT_POINT_LAYER);
basePartConnectPts = GetPointByLayer(pFemPart, BASE_CONNECT_TO_SLAB_RIGHT_POINT_LAYER);
slabConnectPts.insert(slabConnectPts.end(), slabPartConnectPts.begin(), slabPartConnectPts.end());
baseConnectPts.insert(baseConnectPts.end(), basePartConnectPts.begin(), basePartConnectPts.end());
Update1DConnection(pFemPart->BaseFEModel(), slabConnectPts, baseConnectPts,
SLAB_BASE_CONNECTION_NAME, SLAB_BASE_CONNECTION_COLLECTOR_NAME, SLAB_BASE_CONNECTION_MESH_NAME);
}
//------------------------------------------------------------------------------
// Re-select the given body occurrences as the FEM part's geometry.
// Points are always synchronized into the FEM part; lines only on request.
//------------------------------------------------------------------------------
void BaseComponent::SetFeGeometryData( FemPart * pFemPart, const std::vector<Body*> &bodyOccs, bool syncLines )
{
    scoped_ptr<FemSynchronizeOptions> pSyncOpts(pFemPart->NewFemSynchronizeOptions());
    pSyncOpts->SetSynchronizePointsFlag(true);
    pSyncOpts->SetSynchronizeLinesFlag(syncLines);

    pFemPart->SetGeometryData(FemPart::UseBodiesOptionSelectedBodies, bodyOccs, pSyncOpts.get());
    //pFemPart->BaseFEModel()->UpdateFemodel();
}
// Helper that re-assigns node/element label offsets on the assembly FE
// model so its two child FE models get non-overlapping label ranges.
class FELabelUpdater
{
public:
    FELabelUpdater()
    {
    }
    ~FELabelUpdater()
    {
    }
    // Recompute and apply the label offsets (see definition below).
    void Update();
protected:
    // Largest element label currently present in *pElemLabelMap*.
    int GetMaxElementLabel(FEElementLabelMap *pElemLabelMap) const;
    // Largest node label currently present in *pNodeLabelMap*.
    int GetMaxNodeLabel(FENodeLabelMap *pNodeLabelMap) const;
private:
    // NOTE(review): declared but no definition appears in this file and it
    // is unused here — candidate for removal if not defined elsewhere.
    FEModelOccurrence* GetFEModelOcc(const IFEModel *pFEModel) const;
};
//------------------------------------------------------------------------------
// Apply label offsets on the assembly FE model: the first child keeps its
// labels, the second starts after the first child's maximum node/element
// labels so the ranges cannot collide.
//------------------------------------------------------------------------------
void FELabelUpdater::Update()
{
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
    CAE::AssyFemPart *pAssemPart = pPrjProp->GetAFemPart();
    AssyFEModel *pAssyFEModel = dynamic_cast<AssyFEModel*>(pAssemPart->BaseFEModel());
    std::vector<FEModelOccurrence*> feModelChildren(pAssyFEModel->GetChildren());
    // Offsets are only applied for the expected two-child layout; any
    // other layout is left untouched.
    if (feModelChildren.size() == 2)
    {
        feModelChildren[0]->SetLabelOffsets(0, 0, 0);
        feModelChildren[1]->SetLabelOffsets(GetMaxNodeLabel(feModelChildren[0]->FenodeLabelMap()),
            GetMaxElementLabel(feModelChildren[0]->FeelementLabelMap()), 0);
    }
}
//------------------------------------------------------------------------------
// Walk every element label in the map and return the largest one
// (0 for an empty map).
//------------------------------------------------------------------------------
int FELabelUpdater::GetMaxElementLabel(FEElementLabelMap *pElemLabelMap) const
{
    int maxLabel = 0;
    int label = 0;
    const int elemCnt = pElemLabelMap->NumElements();
    for (int idx = 0; idx < elemCnt; ++idx)
    {
        label = pElemLabelMap->AskNextElementLabel(label);
        maxLabel = std::max(maxLabel, label);
    }
    return maxLabel;
}
//------------------------------------------------------------------------------
// Walk every node label in the map and return the largest one
// (0 for an empty map).
//------------------------------------------------------------------------------
int FELabelUpdater::GetMaxNodeLabel(FENodeLabelMap *pNodeLabelMap) const
{
    int maxLabel = 0;
    int label = 0;
    const int nodeCnt = pNodeLabelMap->NumNodes();
    for (int idx = 0; idx < nodeCnt; ++idx)
    {
        label = pNodeLabelMap->AskNextNodeLabel(label);
        maxLabel = std::max(maxLabel, label);
    }
    return maxLabel;
}
//------------------------------------------------------------------------------
// Refresh the assembly FEM after a child model changed: recompute label
// offsets so the two child FE models do not collide, then merge duplicate
// nodes (restricted to the affected meshes for tunnel projects).
//------------------------------------------------------------------------------
void BaseComponent::UpdateAssembleModel()
{
    // Update Node and Element offset anyway
    // if (CanUpdateRailSlabFEModel())
    {
        FELabelUpdater feLabelUpdater;
        feLabelUpdater.Update();
    }
    // merge duplicate nodes
    std::vector<Mesh*> meshToMergeNodes;
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
    if (pPrjProp->GetProjectType() == Project::ProjectType_Tunnel)
    {
        CAE::BaseFemPart *pBaseFem = pPrjProp->GetAFemPart();
        AssyFEModel *pAFEModel = polymorphic_cast<AssyFEModel*>(pBaseFem->BaseFEModel());
        // The child FE model occurrence carrying the rail mesh is the
        // rail/slab one.
        FEModelOccurrence *pRailFeModelOcc = GetFEModelOccByMeshName(pAFEModel, RAIL_MESH_NAME);
        // add to slab meshes to merge list
        std::vector<Mesh*> slabMeshes(GetMeshesInCollector(pRailFeModelOcc,
            FIND_MESH_COL_OCC_PATTERN_NAME, SLAB_MESH_COLLECTOR_NAME));
        meshToMergeNodes.insert(meshToMergeNodes.end(), slabMeshes.begin(), slabMeshes.end());
        // NOTE(review): pMeshMgr is unused — left over from the commented
        // lookup below.
        IMeshManager *pMeshMgr = pRailFeModelOcc->MeshManager();
        //std::string strMeshFindName((boost::format(FIND_MESH_COL_OCC_PATTERN_NAME) % SLAB_MESH_COLLECTOR_NAME).str());
        //polymorphic_cast<IMeshCollector*>(pMeshMgr->FindObject(strMeshFindName.c_str()));
        //meshToMergeNodes.push_back(GetMeshByName(pRailFeModelOcc,
        //    FIND_MESH_OCC_PATTERN_NAME, SLAB_MESH_NAME));
        // The other child occurrence is the base (brace) model.
        std::vector<FEModelOccurrence*> childFeModelOcc(pAFEModel->GetChildren());
        std::vector<FEModelOccurrence*>::iterator iter = std::find_if(childFeModelOcc.begin(), childFeModelOcc.end(),
            lambda::_1 != pRailFeModelOcc);
        FEModelOccurrence *pBaseFeModelOcc = (iter != childFeModelOcc.end()) ? *iter : NULL;
        // add slab-base connection mesh to merge list
        meshToMergeNodes.push_back(GetMeshByName(pBaseFeModelOcc,
            FIND_MESH_OCC_PATTERN_NAME, SLAB_BASE_CONNECTION_MESH_NAME));
    }
    MergeDuplicateNodes(meshToMergeNodes);
}
// Default post-initialization hook: the base component does nothing here;
// subclasses override it to refresh their own state after the dialog loads.
void BaseComponent::OnInit()
{
}
//------------------------------------------------------------------------------
// Create one swept mesh per polygon body in the named collector, sweeping
// from each body's "top" face to its "bottom" face with the element size
// taken from the named expression. Bodies are iterated in reverse while
// the "<meshName>(k)" suffix counts up from 1.
//------------------------------------------------------------------------------
void BaseComponent::UpdateSweptMesh(IFEModel *pFeModel, const std::vector<CAEBody*> &pPolygonBodies,
    const std::string &meshColName, const std::string &meshName,
    const std::string &eleSizeExpName)
{
    std::vector<CAEFace*> vCaeFaces;
    MeshManager *pMeshMgr = polymorphic_cast<MeshManager*>(pFeModel->MeshManager());
    std::string meshColFullName((boost::format(FIND_MESH_COL_PATTERN_NAME) % meshColName).str());
    MeshCollector *pMeshCol = polymorphic_cast<MeshCollector*>(pMeshMgr->FindObject(meshColFullName.c_str()));
    for (int idx = int(pPolygonBodies.size()) - 1; idx >= 0; idx--)
    {
        // A missing named face is passed through as NULL.
        vCaeFaces = GetCaeFaceByName(pPolygonBodies[idx], FACE_NAME_TOP);
        CAEFace *pTopFace = vCaeFaces.empty() ? NULL : vCaeFaces[0];
        vCaeFaces = GetCaeFaceByName(pPolygonBodies[idx], FACE_NAME_BOTTOM);
        CAEFace *pBottomFace = vCaeFaces.empty() ? NULL : vCaeFaces[0];
        std::string curMeshName(meshName + "(" + boost::lexical_cast<std::string>((int)(pPolygonBodies.size()) - idx) + ")");
        CreateSweptMesh(pMeshMgr, pMeshCol, curMeshName, pTopFace, pBottomFace, eleSizeExpName);
    }
}
//------------------------------------------------------------------------------
// Create one swept mesh per polygon body (structure-FEM variant), seeding
// each sweep from the body's "top" face; a missing face is passed as NULL.
//------------------------------------------------------------------------------
void BaseComponent::UpdateSweptMesh_sf(IFEModel *pFeModel, const std::vector<CAEBody*> &pPolygonBodies,
    const std::string &meshColName, const std::string &meshName,
    Expression *pEleSize)
{
    for (std::vector<CAEBody*>::const_iterator it = pPolygonBodies.begin();
         it != pPolygonBodies.end(); ++it)
    {
        std::vector<CAEFace*> topFaces(GetCaeFaceByName(*it, FACE_NAME_TOP));
        CAEFace *pTopFace = topFaces.empty() ? NULL : topFaces.front();
        CreateSweptMesh_sf(pFeModel, *it, meshColName, meshName, pTopFace, pEleSize);
    }
}
//------------------------------------------------------------------------------
// Merge coincident nodes (within 0.001) across the given meshes via the
// UF_SF API; throws NXException when the UF call reports an error.
//------------------------------------------------------------------------------
void BaseComponent::MergeDuplicateNodes(const std::vector<Mesh*> &meshToMergeNodes)
{
    int numDuplicates = 0;
    double tolerance = 0.001;
    std::vector<tag_t> tMeshesToMearge(meshToMergeNodes.size());
    for (int idx = 0; idx < (int)(meshToMergeNodes.size()); idx++)
    {
        tag_p_t tMeshPtrs = NULL;
        int meshCnt;
        // Resolve each NXOpen mesh to its underlying UF mesh tag(s).
        UF_SF_locate_all_meshes(meshToMergeNodes[idx]->Tag(), &meshCnt, &tMeshPtrs);
        // Frees the UF-allocated tag array when it goes out of scope.
        boost::shared_array<tag_t> tMeshPtrArray(tMeshPtrs, UF_free);
        if (meshCnt > 0)
            tMeshesToMearge[idx] = tMeshPtrs[0];   // only the first located mesh is used
    }
    //std::transform(meshToMergeNodes.begin(), meshToMergeNodes.end(),
    //    tMeshesToMearge.begin(), boost::bind(&Mesh::Tag, _1));
    // NOTE(review): NULL_TAG is passed where a tag_t* is expected for the
    // empty case — presumably it evaluates to a null pointer; confirm.
    int iErr = UF_SF_check_model_duplicate_nodes(static_cast<int>(tMeshesToMearge.size()),
        tMeshesToMearge.empty() ? NULL_TAG : &tMeshesToMearge[0],
        true, tolerance, &numDuplicates);
    if (iErr != 0)
        throw NXException::Create(iErr);
}
//------------------------------------------------------------------------------
// Capability flags: the base component modifies nothing; subclasses
// override whichever flags match the parts of the FE model they change,
// which drives the selective updates in UpdateFEModel and friends.
//------------------------------------------------------------------------------
bool BaseComponent::CanUpdateRailSlabFEModel() const
{
    return false;
}
bool BaseComponent::CanUpdateBraseFEModel() const
{
    return false;
}
bool BaseComponent::CanUpdateRailSlabConnection() const
{
    return false;
}
bool BaseComponent::CanUpdateBraseConnection() const
{
    return false;
}
}
<file_sep>/src/NXVsar/src/vsar/Vsar_Train.cxx
#include <uf_defs.h>
#include <Vsar_Train.hxx>
//#include <boost/filesystem.hpp>
//#include <boost/shared_ptr.hpp>
//#include <boost/bind.hpp>
#include <boost/cast.hpp>
//#include <NXOpen/Session.hxx>
//#include <NXOpen/NXException.hxx>
//#include <NXOpen/PartCollection.hxx>
//#include <NXOpen/Part.hxx>
//#include <NXOpen/Assemblies_ComponentAssembly.hxx>
//#include <NXOpen/Assemblies_Component.hxx>
//#include <NXOpen/ExpressionCollection.hxx>
#include <NXOpen/Expression.hxx>
//#include <NXOpen/Body.hxx>
//#include <NXOpen/Point.hxx>
//#include <NXOpen/BodyCollection.hxx>
//#include <NXOpen/CAE_FemPart.hxx>
//#include <NXOpen/CAE_FEModel.hxx>
#include <Vsar_Names.hxx>
//#include <Vsar_Project.hxx>
//#include <Vsar_Utils.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace boost;
using namespace NXOpen;
//using namespace NXOpen::CAE;
//using namespace NXOpen::Assemblies;
//using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Attribute table for the Train component. Each row is a CompAttrInfo:
// { attribute id, part owning the expression, expression name }.
// The carriage count lives on the train part; everything else lives on
// the carriage part.
static CompAttrInfo attrExpInfo[] =
{
    {TRAIN_CARRIAGE_COUNT_ID_NAME, TRAIN_PRT_PART_NAME, TRAIN_CARRIAGE_COUNT_EXP_NAME},
    //{"carriageLength", CARRIAGE_PRT_PART_NAME, "Length"},
    {TRAIN_CARRIAGE_DISTANCE_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_CARRIAGE_DISTANCE_EXP_NAME},
    {TRAIN_CARRIAGE_WEIGHT_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_CARRIAGE_WEIGHT_EXP_NAME},
    {TRAIN_BOGIE_LENGTH_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_BOGIE_LENGTH_EXP_NAME},
    {TRAIN_HALF_BOGIE_DISTANCE_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_HALF_BOGIE_DISTANCE_EXP_NAME},
    {TRAIN_BOGIE_WEIGHT_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_BOGIE_WEIGHT_EXP_NAME},
    {TRAIN_WHEELSET_INTERVAL_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_INTERVAL_EXP_NAME},
    {TRAIN_WHEELSET_WEIGHT_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_WEIGHT_EXP_NAME},
    {TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_EXP_NAME},
    {TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_EXP_NAME},
    {TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_EXP_NAME},
    {TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_EXP_NAME},
    {TRAIN_WHEELSET_CONTACT_COEF_ELAST_ID_NAME, CARRIAGE_PRT_PART_NAME, TRAIN_WHEELSET_CONTACT_COEF_ELAST_EXP_NAME}
};
// Build a Train backed by the static attribute table above. The cached
// carriage count starts at 0 until OnInit() reads the real value.
Train::Train() : BaseComponent(attrExpInfo, N_ELEMENTS(attrExpInfo)),
    m_oldCarriageCount(0)
{
}
// No resources of its own to release.
Train::~Train()
{
}
int Train::GetCarriageCount() const
{
Expression *pCarriageCntExp = NULL;
if (!m_compAttrs.empty())
pCarriageCntExp = GetExpression(m_compAttrs[0].m_partName, m_compAttrs[0].m_expName);
return pCarriageCntExp ? numeric_cast<int>(pCarriageCntExp->Value()) : 0;
}
//void Train::UpdateRailSlabModel()
//{
// int carriageCnt = GetCarriageCount();
// if (carriageCnt == m_oldCarriageCount)
// return;
// BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
// CAE::FemPart *pRailSlabFem = pPrjProp->GetRailSlabFemPart();
// if (carriageCnt > m_oldCarriageCount)
// {
// std::vector<Body*> bodyOccs;
// std::vector<Body*> tmpBodyOccs;
// // Carriage Settings
// tmpBodyOccs = GetGeoModelOccs(pRailSlabFem, CARRIAGE_PART_NAME, CARRIAGE_BODY_NAME);
// bodyOccs.insert(bodyOccs.end(), tmpBodyOccs.begin(), tmpBodyOccs.end());
// // Slab Settings
// tmpBodyOccs = GetGeoModelOccs(pRailSlabFem, SLAB_PART_NAME, SLAB_BODY_NAME);
// bodyOccs.insert(bodyOccs.end(), tmpBodyOccs.begin(), tmpBodyOccs.end());
// SetFeGeometryData(pRailSlabFem, bodyOccs, true);
// //std::string meshName = std::string("Mesh[").append(SLAB_MESH_NAME).append("]");
// //EditSweptMeshData(pRailSlabFem->BaseFEModel(), meshName, tmpBodyOccs);
// UpdateSweptMesh(pRailSlabFem->BaseFEModel(), GetCaeBodies(tmpBodyOccs),
// SLAB_MESH_COLLECTOR_NAME, SLAB_MESH_NAME,
// SLAB_ELEMENT_SIZE_NAME);
// }
// UpdateRailSlabConnection(pRailSlabFem);
// MergeDuplicateNodes();
//}
//void Train::UpdateBraseModel()
//{
// int carriageCnt = GetCarriageCount();
// if (carriageCnt == m_oldCarriageCount)
// return;
// BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
// CAE::FemPart *pBaseFem = pPrjProp->GetBraceFemPart();
// if (carriageCnt > m_oldCarriageCount)
// {
// if (pPrjProp->GetProjectType() == Project::ProjectType_Bridge)
// {
// std::vector<Body*> bodyOccs;
// bodyOccs = GetGeoModelOccs(pBaseFem, BRIDGE_BEAM_PART_NAME, BRIDGE_BEAM_BODY_NAME);
// SetFeGeometryData(pBaseFem, bodyOccs, false);
// UpdateSweptMesh(pBaseFem->BaseFEModel(), GetCaeBodies(bodyOccs),
// BRIDGE_MESH_COLLECTOR_NAME, BRIDGE_MESH_NAME,
// BRIDGE_ELEMENT_SIZE_NAME);
// }
// }
// UpdateBaseSlabConnection(pBaseFem);
// MergeDuplicateNodes();
//}
//std::vector<Body*> Train::GetGeoModelOccs(FemPart *pFemPart, const std::string &bodyPrtName, const std::string &bodyName)
//{
// std::vector<Body*> bodyOccs;
// if (pFemPart)
// {
// // Set fem part to display part
// PartCollection *pPrtCol = Session::GetSession()->Parts();
// //Part* pRootGeo = Project::Instance()->GetProperty()->GetGeometryPart();
// Part* pRootGeo = pFemPart->IdealizedPart();
// // get body part prototype
// Part *pBodyPrt = polymorphic_cast<Part*>(pPrtCol->FindObject(bodyPrtName.c_str()));
// // get body part occurrence
// Component *pRootComp = pRootGeo->ComponentAssembly()->RootComponent();
// std::vector<Assemblies::Component*> prtOccs(GetOccInCompTree(pRootComp, pBodyPrt));
// std::vector<Body*> bodyProtos(GetBodyByName(pBodyPrt, bodyName));
// for (unsigned int idx = 0; idx < prtOccs.size(); idx++)
// {
// for (unsigned int jdx = 0; jdx < bodyProtos.size(); jdx++)
// {
// Body *pBodyOcc = dynamic_cast<Body*>(prtOccs[idx]->FindOccurrence(bodyProtos[jdx]));
// if (pBodyOcc)
// bodyOccs.push_back(pBodyOcc);
// }
// }
// }
// return bodyOccs;
//}
//void Train::UpdateRailSlabConnection(FemPart *pFemPart)
//{
// std::vector<TaggedObject*> railConnectPts;
// std::vector<TaggedObject*> slabConnectPts;
// //railConnectPts = GetPointByAttrName(pFemPart, RAIL_CONNECTION_NAME);
// //slabConnectPts = GetPointByAttrName(pFemPart, SLAB_CONNECT_TO_RAIL_NAME);
// railConnectPts = GetPointByLayer(pFemPart, RAIL_CONNECTION_POINT_LAYER);
// slabConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_RAIL_POINT_LAYER);
// Update1DConnection(pFemPart->BaseFEModel(), railConnectPts, slabConnectPts,
// RAIL_SLAB_CONNECTION_NAME, RAIL_SLAB_CONNECTION_COLLECTOR_NAME);
//}
//void Train::UpdateBaseSlabConnection(FemPart *pFemPart)
//{
// std::vector<TaggedObject*> slabConnectPts;
// std::vector<TaggedObject*> baseConnectPts;
// std::vector<TaggedObject*> slabPartConnectPts;
// std::vector<TaggedObject*> basePartConnectPts;
// // get left side points
// slabPartConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_BASE_LEFT_POINT_LAYER);
// basePartConnectPts = GetPointByLayer(pFemPart, BASE_CONNECT_TO_SLAB_LEFT_POINT_LAYER);
// slabConnectPts.insert(slabConnectPts.end(), slabPartConnectPts.begin(), slabPartConnectPts.end());
// baseConnectPts.insert(baseConnectPts.end(), basePartConnectPts.begin(), basePartConnectPts.end());
// // get right side points
// slabPartConnectPts = GetPointByLayer(pFemPart, SLAB_CONNECT_TO_BASE_RIGHT_POINT_LAYER);
// basePartConnectPts = GetPointByLayer(pFemPart, BASE_CONNECT_TO_SLAB_RIGHT_POINT_LAYER);
// slabConnectPts.insert(slabConnectPts.end(), slabPartConnectPts.begin(), slabPartConnectPts.end());
// baseConnectPts.insert(baseConnectPts.end(), basePartConnectPts.begin(), basePartConnectPts.end());
// Update1DConnection(pFemPart->BaseFEModel(), slabConnectPts, baseConnectPts,
// SLAB_BASE_CONNECTION_NAME, SLAB_BASE_CONNECTION_COLLECTOR_NAME);
//}
// Snapshot the carriage count so the CanUpdate*() predicates can detect
// later changes made through the dialog.
void Train::OnInit()
{
    m_oldCarriageCount = GetCarriageCount();
}
//void Train::SetFeGeometryData( FemPart * pFemPart, const std::vector<Body*> &bodyOccs, bool syncLines )
//{
// scoped_ptr<FemSynchronizeOptions> psyncData;
// psyncData.reset(pFemPart->NewFemSynchronizeOptions());
// psyncData->SetSynchronizeLinesFlag(syncLines);
// psyncData->SetSynchronizePointsFlag(true);
// pFemPart->SetGeometryData(FemPart::UseBodiesOptionSelectedBodies, bodyOccs, psyncData.get());
// //pFemPart->BaseFEModel()->UpdateFemodel();
//}
bool Train::CanUpdateRailSlabFEModel() const
{
return GetCarriageCount() > m_oldCarriageCount;
}
bool Train::CanUpdateBraseFEModel() const
{
return GetCarriageCount() > m_oldCarriageCount;
}
bool Train::CanUpdateRailSlabConnection() const
{
return GetCarriageCount() != m_oldCarriageCount;
}
bool Train::CanUpdateBraseConnection() const
{
return GetCarriageCount() != m_oldCarriageCount;
}
}
<file_sep>/src/NXVsar/include/VsarUI_NewProject.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_NewProject.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32
//
//==============================================================================
#ifndef VSARUI_NEWPROJECT_H_INCLUDED
#define VSARUI_NEWPROJECT_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseDialog.hxx>
namespace VsarUI
{
// Block Styler dialog for creating a new VSAR project: collects a project
// name, a destination folder, and a project type (bridge / selmi / tunnel),
// then creates the project on Apply.
class NewProject : public BaseDialog
{
    // class members
public:
    NewProject();
    ~NewProject();
    // Ensures a work part exists, then shows the dialog modally.
    static void ShowDialog();
    //void RegisterWithMenu();
    //----------------------- BlockStyler Callback Prototypes ---------------------
    // The following member function prototypes define the callbacks
    // specified in your BlockStyler dialog. The empty implementaiton
    // of these prototypes is provided in the Vsar_NewProject.cpp file.
    // You are REQUIRED to write the implementation for these funtions.
    //------------------------------------------------------------------------------
    virtual void InitializeCb();           // caches UI block pointers
    virtual void DialogShownCb();          // sets initial type + preview image
    virtual int ApplyCb();                 // creates the project
    virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block); // per-block change handler
    virtual bool Okay();                   // true when name and path are filled in
private:
    //NXOpen::BlockStyler::UIBlock* m_prjGroup;// Block type: Group
    NXOpen::BlockStyler::UIBlock* m_prjName;// Block type: String
    NXOpen::BlockStyler::UIBlock* m_prjPath;// Block type: NativeFolderBrowser
    //NXOpen::BlockStyler::UIBlock* m_group;// Block type: Group
    NXOpen::BlockStyler::UIBlock* m_prjType;// Block type: Enumeration
    NXOpen::BlockStyler::UIBlock* m_typeImg;// Block type: Label (type preview bitmap)
};
}
#endif //VSARUI_NEWPROJECT_H_INCLUDED
<file_sep>/src/NXVsar/src/vsarint/VsarUI_NewProject.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_NewProject.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_NewProject.hxx>
#include <boost/scope_exit.hpp>
#include <boost/scoped_ptr.hpp>
#include <uf_part.h>
#include <uf_assem.h>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/BlockStyler_UIBlock.hxx>
#include <NXOpen/BlockStyler_PropertyList.hxx>
#include <Vsar_Project.hxx>
#include <Vsar_Utils.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Bind this dialog to its Block Styler layout file.
NewProject::NewProject() : BaseDialog("NewProject.dlx")
{
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
// Nothing to clean up here.
NewProject::~NewProject()
{
}
//#if CALLBACK
//------------------------------------------------------------------------------
//Method name: Show_Vsar_NewProject
//------------------------------------------------------------------------------
// Show the New Project dialog modally. If NX has no current work part, a
// throw-away part is created first so the dialog has a context, and it is
// closed again on every exit path (including exceptions).
void NewProject::ShowDialog()
{
    tag_t tTempPrt = NULL_TAG;
    if (UF_ASSEM_ask_work_part() == NULL_TAG)
    {
        // No work part -- create a temporary one for the dialog's lifetime.
        const char *strPrtName = "tempPart";
        UF_CALL(UF_PART_new(strPrtName, 1, &tTempPrt));
    }
    // Scope-exit guard: close the temporary part even if Show() throws.
    BOOST_SCOPE_EXIT((&tTempPrt))
    {
        if (tTempPrt != NULL_TAG)
            UF_CALL(UF_PART_close(tTempPrt, 0, 1));
    }
    BOOST_SCOPE_EXIT_END
    boost::scoped_ptr<NewProject> pNewDialog(new NewProject());
    try
    {
        // The following method shows the dialog immediately
        pNewDialog->Show(BlockDialog::DialogModeEdit);
    }
    catch(std::exception& ex)
    {
        // Report any failure in a message box instead of letting it escape NX.
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//#endif//CALLBACK
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
// Resolve the dialog's UI blocks by their Block Styler identifiers and cache
// the pointers for use in the other callbacks.
void NewProject::InitializeCb()
{
    CompositeBlock *pTopBlock = m_theDialog->TopBlock();
    try
    {
        //m_prjGroup = pTopBlock->FindBlock("prjGroup");
        m_prjName = pTopBlock->FindBlock("prjName");
        m_prjPath = pTopBlock->FindBlock("prjPath");
        //m_group = pTopBlock->FindBlock("group");
        m_prjType = pTopBlock->FindBlock("prjType");
        m_typeImg = pTopBlock->FindBlock("typeImg");
    }
    catch(std::exception& ex)
    {
        // A missing block id in the .dlx would surface here.
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Executed just before the dialog is displayed: default the project-type
// enumeration to its first entry and refresh the preview image to match
// (UpdateCb performs the bitmap selection).
void NewProject::DialogShownCb()
{
    try
    {
        // scoped_ptr: GetProperties() returns a caller-owned PropertyList.
        boost::scoped_ptr<PropertyList> pPrjPropList(m_prjType->GetProperties());
        pPrjPropList->SetEnum("Value", 0);
        UpdateCb(m_prjType);
    }
    catch(std::exception& ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
int NewProject::ApplyCb()
{
int errorCode = 0;
try
{
std::string prjName = m_prjName->GetProperties()->GetString("Value").GetUTF8Text();
std::string prjPath = m_prjPath->GetProperties()->GetString("Path").GetUTF8Text();
Project::ProjectType prjType(static_cast<Project::ProjectType>(m_prjType->GetProperties()->GetEnum("Value")));
Project::New(prjName, prjPath, prjType);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
errorCode = 1;
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
return errorCode;
}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Per-block change handler. For the project-type enumeration, selects the
// preview bitmap that matches the chosen type. Always returns 0.
int NewProject::UpdateCb(UIBlock* block)
{
    try
    {
        if(block == m_prjName)
        {
            // TODO: Validate the name
        }
        else if(block == m_prjPath)
        {
            // TODO: Validate the path
        }
        else if(block == m_prjType)
        {
            // scoped_ptr: GetProperties() returns a caller-owned PropertyList.
            boost::scoped_ptr<PropertyList> pPrjPropList(m_prjType->GetProperties());
            boost::scoped_ptr<PropertyList> pImgPropList(m_typeImg->GetProperties());
            // Map the selected project type onto its preview bitmap path.
            const char *pBitmap = NULL;
            switch (pPrjPropList->GetEnum("Value"))
            {
            case Project::ProjectType_Bridge:
                pBitmap = "bitmap/bridge.bmp";
                break;
            case Project::ProjectType_Selmi_Infinite:
                pBitmap = "bitmap/selmi.bmp";
                break;
            case Project::ProjectType_Tunnel:
                pBitmap = "bitmap/tunnel.bmp";
                break;
            default:
                break;   // unknown value: leave the image untouched
            }
            if (pBitmap != NULL)
                pImgPropList->SetString("Bitmap", pBitmap);
        }
    }
    catch(std::exception& ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
bool NewProject::Okay()
{
std::string prjName = m_prjName->GetProperties()->GetString("Value").GetUTF8Text();
std::string prjPath = m_prjPath->GetProperties()->GetString("Path").GetUTF8Text();
return !prjName.empty() && !prjPath.empty();
}
}
<file_sep>/src/NXVsar/src/vsar/Vsar_Solution.cxx
#include <uf_defs.h>
#include <Vsar_Solution.hxx>
//#include <algorithm>
//#include <boost/bind.hpp>
//#include <NXOpen/Expression.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Project.hxx>
using namespace NXOpen;
using namespace boost;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// No resources of its own to release.
BaseSolution::~BaseSolution()
{
}
// Nothing to initialize for a generic solution.
void BaseSolution::OnInit()
{
}
// Solutions are treated as geometry-dependent by default.
bool BaseSolution::HasGeometryDependency() const
{
    return true;
}
// Solutions never trigger a rail/slab FE-model rebuild.
bool BaseSolution::CanUpdateRailSlabFEModel() const
{
    return false;
}
// Solutions never trigger a brace/base FE-model rebuild.
bool BaseSolution::CanUpdateBraseFEModel() const
{
    return false;
}
// Solutions never trigger a rail/slab connection update.
bool BaseSolution::CanUpdateRailSlabConnection() const
{
    return false;
}
// Solutions never trigger a brace/base connection update.
bool BaseSolution::CanUpdateBraseConnection() const
{
    return false;
}
// Attribute table for ResponseSolution. The owning part name is left empty
// here because it depends on the active project; the ResponseSolution
// constructor fills it in with "<project>_s".
static CompAttrInfo attrExpInfo[] =
{
    {TRAIN_SPEED_ID_NAME, "", TRAIN_SPEED_EXP_NAME},
    {COMPUTE_TIME_STEP_ID_NAME, "", COMPUTE_TIME_STEP_EXP_NAME}
};
// Build the response solution and resolve the owning part for every
// attribute: the static table leaves the part name blank, so fill it in
// with the project's simulation part name, "<project>_s".
ResponseSolution::ResponseSolution() : BaseSolution(attrExpInfo, N_ELEMENTS(attrExpInfo))
{
    BaseProjectProperty *pPrjProp = Project::Instance()->GetProperty();
    const std::string simPrtName(pPrjProp->GetProjectName().append("_s"));
    for (StlCompAttrInfoVector::iterator it = m_compAttrs.begin();
         it != m_compAttrs.end(); ++it)
    {
        it->m_partName = simPrtName;
    }
}
// No resources of its own to release.
ResponseSolution::~ResponseSolution()
{
}
// The noise solution exposes no expression-backed attributes (empty table).
NoiseSolution::NoiseSolution() : BaseSolution(NULL, 0)
{
}
// No resources of its own to release.
NoiseSolution::~NoiseSolution()
{
}
}<file_sep>/src/NXVsar/src/vsar/Vsar_SelmiInfiniteBase.cxx
#include <uf_defs.h>
#include <Vsar_SelmiInfiniteBase.hxx>
//#include <boost/cast.hpp>
#include <NXOpen/Expression.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace NXOpen;
using namespace boost;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Attribute table for SelmiInfiniteBase: width (index 0) and height
// (index 1), both owned by the base part. GetHeight() relies on the
// height row being at index 1.
static CompAttrInfo attrExpInfo[] =
{
    {WIDTH_ID_NAME, BASE_PRT_PART_NAME, WIDTH_EXP_NAME},
    {HEIGHT_ID_NAME, BASE_PRT_PART_NAME, HEIGHT_EXP_NAME}
};
// Build the component on the static attribute table above. The cached
// height starts at 1 until OnInit() reads the real value.
SelmiInfiniteBase::SelmiInfiniteBase() : BaseComponent(attrExpInfo, N_ELEMENTS(attrExpInfo)),
    m_oldHeight(1)
{
}
// No resources of its own to release.
SelmiInfiniteBase::~SelmiInfiniteBase()
{
}
// Snapshot the height so the CanUpdate*() predicates can detect changes.
void SelmiInfiniteBase::OnInit()
{
    m_oldHeight = GetHeight();
}
double SelmiInfiniteBase::GetHeight() const
{
Expression *pSupportCntExp = NULL;
if (!m_compAttrs.empty())
pSupportCntExp = GetExpression(m_compAttrs[1].m_partName, m_compAttrs[1].m_expName);
return pSupportCntExp ? pSupportCntExp->Value() : 0;
}
// The base geometry never affects the rail/slab FE model.
bool SelmiInfiniteBase::CanUpdateRailSlabFEModel() const
{
    return false;
}
// Rebuild the brace/base FE model when the height expression changed.
// NOTE(review): exact double comparison -- any numeric change counts as
// different; confirm a tolerance is not wanted here.
bool SelmiInfiniteBase::CanUpdateBraseFEModel() const
{
    return GetHeight() != m_oldHeight;
}
// The base geometry never affects the rail/slab connection.
bool SelmiInfiniteBase::CanUpdateRailSlabConnection() const
{
    return false;
}
// Refresh the brace/base connection when the height expression changed.
// NOTE(review): exact double comparison, same caveat as CanUpdateBraseFEModel.
bool SelmiInfiniteBase::CanUpdateBraseConnection() const
{
    return GetHeight() != m_oldHeight;
}
}<file_sep>/src/VsarInit/src/vsar/Vsar_Init_main.cxx
/*****************************************************************************
**
** VsarInit.cpp
**
** Description:
** Contains Unigraphics entry points for the application.
**
*****************************************************************************/
/* Include files */
#include <uf_defs.h>
#include <sstream>
#include <iostream>
#include <NXOpen/Session.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/Callback.hxx>
#include <NXOpen/PartCollection.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/MenuBar_MenuBarManager.hxx>
#include <NXOpen/ListingWindow.hxx>
#include <Vsar_Project.hxx>
using namespace NXOpen;
using namespace Vsar;
static void processException( const NXException &exc );
namespace Vsar
{
// MenuScript callback for the "New Project" menu item. Currently only logs
// a placeholder line to the listing window; the menu registration itself is
// commented out in ufsta().
MenuBar::MenuBarManager::CallbackStatus NewProjectCB( MenuBar::MenuButtonEvent* buttonEvent )
{
    ListingWindow *pLstWnd = Session::GetSession()->ListingWindow();
    /* Open the listing (info) window if it is not already open. */
    if (!pLstWnd->IsOpen())
        pLstWnd->Open();
    /* Write a placeholder message; no stream-buffer management is needed here. */
    pLstWnd->WriteLine( "New Project Dialog" );
    return MenuBar::MenuBarManager::CallbackStatusContinue;
}
}
/*****************************************************************************
** Activation Methods
*****************************************************************************/
/* Unigraphics Startup
** This entry point activates the application at Unigraphics startup */
// NX startup entry point: runs once when the library is loaded and registers
// the Vsar::Project open/close handlers for the session.
extern "C" DllExport void ufsta( char *param, int *returnCode, int rlen )
{
    try
    {
        // Initialize the NX Open C++ API environment
        Session *theSession = Session::GetSession();
        //MenuBar::MenuBarManager *pMBMgr = UI::GetUI()->MenuBarManager();
        //pMBMgr->AddMenuAction("Vsar_NewProject", make_callback(&Vsar::NewProjectCB));
        // Track part open/close events so Vsar::Project can follow the
        // session's project lifecycle.
        theSession->Parts()->AddPartOpenedHandler(make_callback(&Project::Open));
        theSession->Parts()->AddPartClosedHandler(make_callback(&Project::Close));
    }
    /* Handle errors */
    catch ( const NXException &exc )
    {
        processException( exc );
    }
}
/* Open Part
** This user exit is invoked after the following menu item is activated:
** "File->Open" */
// "File->Open" user exit. Currently a stub: it only obtains the session and
// reports any NXException; the TODO body is intentionally empty.
extern "C" DllExport void ufget( char *param, int *returnCode, int rlen )
{
    try
    {
        // Initialize the NX Open C++ API environment
        Session *theSession = Session::GetSession();
        /* TODO: Add your application code here */
    }
    /* Handle errors */
    catch ( const NXException &exc )
    {
        processException( exc );
    }
}
/*****************************************************************************
** Utilities
*****************************************************************************/
/* Unload Handler
** This function specifies when to unload your application from Unigraphics.
** If your application registers a callback (from a MenuScript item or a
** User Defined Object for example), this function MUST return
** "UF_UNLOAD_UG_TERMINATE". */
// Unload policy: callbacks are registered in ufsta(), so the library must
// stay loaded until NX terminates.
extern "C" DllExport int ufusr_ask_unload()
{
    //return (int)Session::LibraryUnloadOptionExplicitly;
    //return (int)Session::LibraryUnloadOptionImmediately;
    return (int)Session::LibraryUnloadOptionAtTermination;
}
/* processException
Prints error messages to standard error and a Unigraphics
information window. */
static void processException( const NXException &exc )
{
/* Construct a buffer to hold the text. */
std::ostringstream error_message;
/* Initialize the buffer with the required text. */
error_message << std::endl
<< "Error:" << std::endl
<< exc.Message()
<< std::endl << std::endl << std::ends;
ListingWindow *pLstWnd = Session::GetSession()->ListingWindow();
/* Open the UgInfoWindow */
if (!pLstWnd->IsOpen())
pLstWnd->Open();
/* Write the message to the UgInfoWindow. The str method */
/* freezes the buffer, so it must be unfrozen afterwards. */
pLstWnd->WriteLine( error_message.str() );
/* Write the message to standard error */
std::cerr << error_message.str();
//error_message.rdbuf()->freeze( 0 );
}
<file_sep>/src/NXVsar/include/VsarUI_BaseCompDialog.hxx
#ifndef VSARUI_BASECOMPDIALOG_H_INCLUDED
#define VSARUI_BASECOMPDIALOG_H_INCLUDED
#include <VsarUI_BaseDialog.hxx>
namespace NXOpen
{
class Expression;
}
namespace Vsar
{
class BaseComponent;
}
namespace VsarUI
{
// Abstract dialog base for component-editing dialogs: owns a BaseComponent
// and moves values between its expressions and the dialog's UI blocks.
class BaseCompDialog : public BaseDialog
{
    // class members
public:
    //----------------------- BlockStyler Callback Prototypes ---------------------
    // The following member function prototypes define the callbacks
    // specified in your BlockStyler dialog. The empty implementaiton
    // of these prototypes is provided in the Vsar_BaseCompDialog.cpp file.
    // You are REQUIRED to write the implementation for these funtions.
    //------------------------------------------------------------------------------
    virtual void InitializeCb();
    //virtual void DialogShownCb();
    virtual int ApplyCb();
    //virtual int CancelCb();
    //virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* pBlock);
protected:
    BaseCompDialog(const std::string &dialogName, Vsar::BaseComponent *pComp);
    virtual ~BaseCompDialog() = 0;   // pure virtual dtor: class is abstract
    // Apply `hander` to each (UI block, expression) pair of the component.
    template <typename Handler>
    void HandleExpressions(Handler hander);
    // Copy an expression value into a UI block / write a block value back.
    void InitBlock( NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::Expression *pExpression );
    void WriteExpression( NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::Expression *pExpression );
protected:
    // Owned component being edited by this dialog.
    boost::scoped_ptr<Vsar::BaseComponent> m_pComp;
};
}
#endif //VSARUI_BASECOMPDIALOG_H_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Bridge.hxx
#ifndef VSAR_BRIDGE_H_INCLUDED
#define VSAR_BRIDGE_H_INCLUDED
#include <Vsar_Component.hxx>
namespace Vsar
{
// Bridge component: tracks height and span count, and requests model /
// connection updates when either changes.
class Bridge : public BaseComponent
{
public:
    Bridge();
    ~Bridge();
    // Snapshots the current height/span count for later change detection.
    virtual void OnInit();
protected:
    virtual bool CanUpdateRailSlabFEModel() const;
    virtual bool CanUpdateBraseFEModel() const;
    virtual bool CanUpdateRailSlabConnection() const;
    virtual bool CanUpdateBraseConnection() const;
private:
    double GetHeight() const;
    int GetSpanCount() const;
private:
    // Values captured in OnInit(), compared against current expressions.
    double m_oldHeight;
    int m_oldSpanCount;
};
}
#endif //VSAR_BRIDGE_H_INCLUDED
<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/src/NXVsdaneBridge.cxx
// This is the main DLL file.
#include "NXVsdaneBridge.hxx"
#include "NXVsdaneDotNet.hxx"
using namespace System;
using namespace System::Runtime::InteropServices;
using namespace NXOpen;
using namespace NXOpen::Utilities;
using namespace NXOpen::CAE;
namespace Vsdane
{
// Instantiate the managed implementation object (C++/CLI); all bridge calls
// are forwarded to it.
NXVsdaneBridge::NXVsdaneBridge()
{
    m_impl = gcnew NXVsdane();
}
//void NXVsdaneBridge::CreateSweptMesh(void *pFeModel,
// const std::string &meshColName, const std::string &meshName,
// void *pSrcFace, void *pTargetFace, void *pEleSize)
//{
//NXVsdane::CreateSweptMeshDotNet(UnmanagedPtr2ManagedIFEModel(pFeModel),
// UnmanagedString2ManagedString(meshColName.c_str()),
// UnmanagedString2ManagedString(meshName.c_str()),
// UnmanagedPtr2ManagedCAEFace(pSrcFace),
// UnmanagedPtr2ManagedCAEFace(pTargetFace),
// UnmanagedPtr2ManagedExpression(pEleSize)
// );
//}
// Forward a swept-mesh creation request into the managed (.NET) NX Open
// layer. All NX objects cross the native/managed boundary as tags and are
// re-resolved to managed wrappers via NXObjectManager::Get.
void NXVsdaneBridge::CreateSweptMesh(tag_t tFeModel,
    const std::string &meshColName, const std::string &meshName,
    tag_t tSrcFace, tag_t tTargetFace, tag_t tEleSize)
{
    //IFEModel^ pFeModel = (IFEModel^)NXObjectManager::Get((Tag)tFeModel);
    m_impl->CreateSweptMeshDotNet((IFEModel^)NXObjectManager::Get((Tag)tFeModel),
        UnmanagedString2ManagedString(meshColName.c_str()),
        UnmanagedString2ManagedString(meshName.c_str()),
        (CAEFace^)NXObjectManager::Get((Tag)tSrcFace),
        (CAEFace^)NXObjectManager::Get((Tag)tTargetFace),
        (Expression^)NXObjectManager::Get((Tag)tEleSize)
        );
}
// Convert a native C string into a managed System::String.
// NOTE(review): PtrToStringAnsi assumes ANSI encoding; confirm callers never
// pass UTF-8 text containing non-ASCII characters.
String^ NXVsdaneBridge::UnmanagedString2ManagedString(const char* pIn)
{
    return Marshal::PtrToStringAnsi(static_cast<IntPtr>(const_cast<char*>(pIn)));
}
//generic<typename T>
//T^ UnmanagedPtr2ManagedType(void *nativePtr)
//{
// IntPtr ptrHandle = IntPtr(nativePtr);
// GCHandle handle = GCHandle::FromIntPtr(ptrHandle);
// T^ result=static_cast<T^>(handle.Target);
// handle.Free();
// return result;
//}
// Recover a managed IFEModel from a native pointer that encodes a GCHandle.
// The handle is freed afterwards, so each native pointer is single-use.
IFEModel^ NXVsdaneBridge::UnmanagedPtr2ManagedIFEModel(void *nativePtr)
{
    IntPtr ptrHandle = IntPtr(nativePtr);
    GCHandle handle = GCHandle::FromIntPtr(ptrHandle);
    IFEModel^ result=static_cast<IFEModel^>(handle.Target);
    handle.Free();
    return result;
}
// Recover a managed CAEFace from a native pointer that encodes a GCHandle.
// The handle is freed afterwards, so each native pointer is single-use.
CAEFace^ NXVsdaneBridge::UnmanagedPtr2ManagedCAEFace(void *nativePtr)
{
    IntPtr ptrHandle = IntPtr(nativePtr);
    GCHandle handle = GCHandle::FromIntPtr(ptrHandle);
    CAEFace^ result=static_cast<CAEFace^>(handle.Target);
    handle.Free();
    return result;
}
// Recover a managed Expression from a native pointer that encodes a GCHandle.
// The handle is freed afterwards, so each native pointer is single-use.
Expression^ NXVsdaneBridge::UnmanagedPtr2ManagedExpression(void *nativePtr)
{
    IntPtr ptrHandle = IntPtr(nativePtr);
    GCHandle handle = GCHandle::FromIntPtr(ptrHandle);
    Expression^ result=static_cast<Expression^>(handle.Target);
    handle.Free();
    return result;
}
}<file_sep>/src/NXVsar/include/Vsar_Component.hxx
#ifndef VSAR_COMPONENT_H_INCLUDED
#define VSAR_COMPONENT_H_INCLUDED
#include <string>
#include <vector>
//#include <boost/scoped_ptr.hpp>
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace NXOpen
{
class Body;
class Expression;
namespace CAE
{
class BaseFemPart;
class FemPart;
class IFEModel;
class CAEBody;
class Mesh;
}
}
namespace Vsar
{
// One row of a component's attribute table: the attribute id exposed to the
// UI layer, the part that owns the expression, and the expression's name.
struct CompAttrInfo
{
    std::string m_attrName;   // attribute identifier used by callers
    std::string m_partName;   // name of the part owning the expression
    std::string m_expName;    // expression name inside that part
};
typedef std::vector<CompAttrInfo> StlCompAttrInfoVector;
// Base class for VSAR model components (Train, Bridge, SelmiInfiniteBase, ...).
// Holds a table mapping attribute ids to part expressions and drives the
// geometry / FE-model / connection update pipeline; the CanUpdate*()
// predicates let subclasses decide which steps are needed.
// ("Brase" in several names is presumably a typo of "Brace" -- kept as-is
// to preserve the interface.)
class BaseComponent
{
public:
    BaseComponent(const CompAttrInfo *pCompAttrs, int compAttrCnt);
    virtual ~BaseComponent();
    // Loads the component's geometry part (lazily by default).
    void LoadGeometryPart(bool onDemandLoad = true);
    const StlCompAttrInfoVector& GetAttrInfo() const
    {
        return m_compAttrs;
    }
    // Expression lookup: by attribute id, or directly by part + expression name.
    NXOpen::Expression* GetExpression(const std::string &attrName);
    static NXOpen::Expression* GetExpression(const std::string &partName, const std::string &expName);
    void UpdateModel();
    // Called after construction to snapshot state for change detection.
    virtual void OnInit();
protected:
    virtual bool HasGeometryDependency() const;
    void UpdateGeometryModel();
    void UpdateFEModel();
    template <typename UpdateCallback>
    void UpdateFECompModel(NXOpen::CAE::BaseFemPart *pFem, UpdateCallback updateCb);
    void UpdateRailSlabModel();
    void UpdateBraseModel();
    // Change predicates overridden by subclasses; base returns false.
    virtual bool CanUpdateRailSlabFEModel() const;
    virtual bool CanUpdateBraseFEModel() const;
    void UpdateAssembleModel();
    // Swept-mesh helpers: element size by expression name or expression object.
    void UpdateSweptMesh(NXOpen::CAE::IFEModel *pFeModel, const std::vector<NXOpen::CAE::CAEBody*> &pPolygonBodies,
        const std::string &meshColName, const std::string &meshName, const std::string &eleSizeExpName);
    void UpdateSweptMesh_sf(NXOpen::CAE::IFEModel *pFeModel, const std::vector<NXOpen::CAE::CAEBody*> &pPolygonBodies,
        const std::string &meshColName, const std::string &meshName, NXOpen::Expression *pEleSize);
    std::vector<NXOpen::Body*> GetGeoModelOccs(NXOpen::CAE::FemPart *pFemPart,
        const std::string &bodyPrtName, const std::string &bodyName);
    void UpdateRailSlabConnection();
    void UpdateBaseSlabConnection();
    virtual bool CanUpdateRailSlabConnection() const;
    virtual bool CanUpdateBraseConnection() const;
    void MergeDuplicateNodes(const std::vector<NXOpen::CAE::Mesh*> &meshToMergeNodes);
    void SetFeGeometryData( NXOpen::CAE::FemPart * pFemPart, const std::vector<NXOpen::Body*> &bodyOccs, bool syncLines );
protected:
    // Attribute table, initialized from the constructor arguments.
    StlCompAttrInfoVector m_compAttrs;
};
}
//#if defined(__MSVC_RUNTIME_CHECKS)
//#undef __MSVC_RUNTIME_CHECKS
//#endif
//#if !defined(_DEBUG)
//#define _DEBUG
//#endif
#endif //VSAR_COMPONENT_H_INCLUDED
<file_sep>/src/NXVsar/src/vsarint/VsarUI_SolveNoise.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Solve.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 05-23-2011 (Format: mm-dd-yyyy)
// Time: 20:48 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_SolveNoise.hxx>
#include <boost/foreach.hpp>
#include <uf_object_types.h>
#include <uf_ui_types.h>
#include <NXOpen/UI.hxx>
#include <NXOpen/Point.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/BlockStyler_UIBlock.hxx>
#include <NXOpen/BlockStyler_PropertyList.hxx>
#include <Vsar_SolveOperation.hxx>
//#include <Vsar_Names.hxx>
#include <Vsar_Solution.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//SelectPoint::SelectPoint() : BaseDialog("SelectPoint.dlx")
//{
//}
//SelectPoint::~SelectPoint()
//{
//}
////------------------------------------------------------------------------------
////Callback Name: initialize_cb
////------------------------------------------------------------------------------
//void SelectPoint::InitializeCb()
//{
// try
// {
// CompositeBlock *pTopBlock = m_theDialog->TopBlock();
// m_selectPoint = pTopBlock->FindBlock("selectPoint");
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
//}
//int SelectPoint::ApplyCb()
//{
// return 0;
//}
//int SelectPoint::UpdateCb(NXOpen::BlockStyler::UIBlock* block)
//{
// return 0;
//}
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Builds the dialog from SolveNoise.dlx and attaches a new NoiseSolution
// component.  NOTE(review): assumes BaseCompDialog takes ownership of the
// NoiseSolution pointer -- confirm against BaseCompDialog.
SolveNoise::SolveNoise() : BaseCompDialog("SolveNoise.dlx", new NoiseSolution())
{
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
// No resources of its own; cleanup is handled by the base class.
SolveNoise::~SolveNoise()
{
}
//------------------------------------------------------------------------------
//Method name: Show_Solve
//------------------------------------------------------------------------------
void SolveNoise::ShowDialog()
{
boost::scoped_ptr<SolveNoise> pSolveDlg(new SolveNoise());
try
{
// The following method shows the dialog immediately
pSolveDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
// Caches the dialog's block handles and restricts the selection block so
// that only point objects can be picked.
void SolveNoise::InitializeCb()
{
try
{
CompositeBlock *pTopBlock = m_theDialog->TopBlock();
//m_outputPointList = pTopBlock->FindBlock("outputPointList");
m_selectPoints = pTopBlock->FindBlock("selectPoints");
//m_specifyPoint = pTopBlock->FindBlock("specifyPoint");
// Limit selection to points (UF_point_type, any subtype).
// NOTE(review): third mask entry is UF_UI_SEL_FEATURE_BODY -- presumably
// the solid-type mask required by the selection intent; verify.
boost::scoped_ptr<PropertyList> pSelPtsPropList(m_selectPoints->GetProperties());
std::vector<Selection::MaskTriple> maskTriples;
maskTriples.push_back(Selection::MaskTriple(UF_point_type, UF_all_subtype, UF_UI_SEL_FEATURE_BODY));
pSelPtsPropList->SetSelectionFilter("SelectionFilter",
Selection::SelectionActionClearAndEnableSpecific, maskTriples);
}
catch(std::exception& ex)
{
// Report initialization failures to the user; the dialog still opens.
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Invoked just before the dialog is displayed.  No values need to be
// preset for this dialog, so the callback is intentionally empty.
void SolveNoise::DialogShownCb()
{
    try
    {
        // Nothing to pre-populate.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
// Selection filter callback: currently accepts every candidate object;
// the type restriction is already enforced by the mask set in InitializeCb.
int SolveNoise::FilterCb(NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::TaggedObject *pSel)
{
// TODO: Filter
return UF_UI_SEL_ACCEPT;
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
int SolveNoise::ApplyCb()
{
int errorCode = 0;
try
{
boost::scoped_ptr<PropertyList> pSelPtsPropList(m_selectPoints->GetProperties());
std::vector<TaggedObject*> selPts(pSelPtsPropList->GetTaggedObjectVector("SelectedObjects"));
std::vector<Point*> outputPoints;
outputPoints.reserve(selPts.size());
BOOST_FOREACH(TaggedObject *pSelObj, selPts)
{
Point *pPoint = dynamic_cast<Point*>(pSelObj);
if (pPoint)
outputPoints.push_back(pPoint);
}
SolveNoiseOperation solveOper(outputPoints);
solveOper.Execute();
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
errorCode = 1;
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
return errorCode;
}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Update callback: no per-block handling is required yet -- the point
// selection is only read back when the dialog is applied.
int SolveNoise::UpdateCb(UIBlock *block)
{
    try
    {
        if (block == m_selectPoints)
        {
            // Intentionally empty: selection changes need no live reaction.
        }
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
//bool SolveNoise::CanOutputElements() const
//{
// boost::scoped_ptr<PropertyList> pHasElemsPropList(m_hasElemsOutput->GetProperties());
// return pHasElemsPropList->GetLogical("Value");
//}
//std::vector<TaggedObject*> SolveNoise::GetOutputElements() const
//{
// boost::scoped_ptr<PropertyList> pOutputElemsPropList(m_outputElements->GetProperties());
// return pOutputElemsPropList->GetTaggedObjectVector("SelectedObjects");
//}
//bool SolveNoise::CanOutputNodes() const
//{
// boost::scoped_ptr<PropertyList> pHasNodesPropList(m_hasNodesOutput->GetProperties());
// return pHasNodesPropList->GetLogical("Value");
//}
//std::vector<TaggedObject*> SolveNoise::GetOutputNodes() const
//{
// boost::scoped_ptr<PropertyList> pOutputNodesPropList(m_outputNodes->GetProperties());
// return pOutputNodesPropList->GetTaggedObjectVector("SelectedObjects");
//}
//bool SolveNoise::CanOutputNodesForNoise() const
//{
// boost::scoped_ptr<PropertyList> pHasNoiseNodesPropList(m_hasNoiseNodeOutput->GetProperties());
// return pHasNoiseNodesPropList->GetLogical("Value");
//}
}
<file_sep>/src/NXVsar/src/vsarint/VsarUI_TrainSettings.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\TrainSettings.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 04-08-2011 (Format: mm-dd-yyyy)
// Time: 21:59 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_TrainSettings.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
//#include <NXOpen/BlockStyler_PropertyList.hxx>
//#include <NXOpen/Part.hxx>
//#include <NXOpen/PartCollection.hxx>
//#include <NXOpen/Expression.hxx>
//#include <NXOpen/ExpressionCollection.hxx>
//#include <boost/filesystem.hpp>
//#include <boost/scope_exit.hpp>
//#include <NXOpen/CAE_BaseFemPart.hxx>
//#include <NXOpen/CAE_BaseFEModel.hxx>
#include <Vsar_Project.hxx>
//#include <Vsar_Utils.hxx>
#include <Vsar_Train.hxx>
#include <Vsar_Names.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Builds the dialog from TrainSettings.dlx and attaches a new Train
// component.  NOTE(review): assumes BaseCompDialog takes ownership of the
// Train pointer -- confirm against BaseCompDialog.
TrainSettings::TrainSettings() : BaseCompDialog("TrainSettings.dlx", new Train)
{
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
// No resources of its own; cleanup is handled by the base class.
TrainSettings::~TrainSettings()
{
}
//------------------------------------------------------------------------------
//Method name: Show_TrainSettings
//------------------------------------------------------------------------------
void TrainSettings::ShowDialog()
{
boost::scoped_ptr<TrainSettings> pTrainSettingDlg(new TrainSettings());
try
{
// The following method shows the dialog immediately
pTrainSettingDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
// Runs the base-class initialization, then caches the handles of every
// train-parameter block (carriage, bogie and wheel-set expressions) so the
// other callbacks can identify them.  Block IDs come from Vsar_Names.hxx.
void TrainSettings::InitializeCb()
{
BaseCompDialog::InitializeCb();
try
{
CompositeBlock *pTopBlock = m_theDialog->TopBlock();
//m_grpCarriage = pTopBlock->FindBlock("grpCarriage");
// Carriage parameters.
m_carriageCount = pTopBlock->FindBlock(TRAIN_CARRIAGE_COUNT_ID_NAME);
m_carriageLength = pTopBlock->FindBlock("carriageLength");
m_carriageDistance = pTopBlock->FindBlock(TRAIN_CARRIAGE_DISTANCE_ID_NAME);
m_carriageWeight = pTopBlock->FindBlock(TRAIN_CARRIAGE_WEIGHT_ID_NAME);
//m_grpBogie = pTopBlock->FindBlock("grpBogie");
// Bogie parameters.
m_bogieLength = pTopBlock->FindBlock(TRAIN_BOGIE_LENGTH_ID_NAME);
m_halfBogieDistance = pTopBlock->FindBlock(TRAIN_HALF_BOGIE_DISTANCE_ID_NAME);
m_bogieWeight = pTopBlock->FindBlock(TRAIN_BOGIE_WEIGHT_ID_NAME);
//m_grpWheelSet = pTopBlock->FindBlock("grpWheelSet");
// Wheel-set parameters (suspension stiffness/damping, contact coefficient).
m_wheelSetInterval = pTopBlock->FindBlock(TRAIN_WHEELSET_INTERVAL_ID_NAME);
m_wheelSetWeight = pTopBlock->FindBlock(TRAIN_WHEELSET_WEIGHT_ID_NAME);
m_wheelSetSglStgSusp = pTopBlock->FindBlock(TRAIN_WHEELSET_SGL_STG_SUSP_STIFF_ID_NAME);
m_wheelSetSglStgSuspDamp = pTopBlock->FindBlock(TRAIN_WHEELSET_SGL_STG_SUSP_DAMP_ID_NAME);
m_wheelSetTwoStgSuspStiff = pTopBlock->FindBlock(TRAIN_WHEELSET_TWO_STG_SUSP_STIFF_ID_NAME);
m_wheelSetTwoStgSuspDamp = pTopBlock->FindBlock(TRAIN_WHEELSET_TWO_STG_SUSP_DAMP_ID_NAME);
m_wheelSetContactCoefElast = pTopBlock->FindBlock(TRAIN_WHEELSET_CONTACT_COEF_ELAST_ID_NAME);
}
catch(std::exception& ex)
{
// Report initialization failures to the user; the dialog still opens.
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Invoked just before the dialog is displayed.  No values need to be
// preset for this dialog, so the callback is intentionally empty.
void TrainSettings::DialogShownCb()
{
    try
    {
        // Nothing to pre-populate.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
//int TrainSettings::ApplyCb()
//{
// int errorCode = BaseCompDialog::ApplyCb();
// if (errorCode != 0)
// return errorCode;
// try
// {
// ;
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// errorCode = 1;
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return errorCode;
//}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Update callback.  None of the train-parameter blocks (carriage, bogie or
// wheel-set expressions) needs interactive handling: every value is read
// back only when the dialog is applied.  The generated template's long
// if/else-if chain of empty branches is therefore folded away; the pointer
// comparisons it performed had no side effects, so behaviour is unchanged.
int TrainSettings::UpdateCb(UIBlock* block)
{
    try
    {
        (void)block;   // no per-block reaction required yet
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
}
<file_sep>/src/NXVsar/src/vsarint/VsarUI_BaseDialog.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_BaseDialog.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_BaseDialog.hxx>
#include <uf_ui_types.h>
#include <NXOpen/Session.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/Callback.hxx>
#include <NXOpen/NXException.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace VsarUI
{
// BaseDialog *theVsar_BaseDialog;
//------------------------------------------------------------------------------
// Initialize static variables
//------------------------------------------------------------------------------
// Process-wide NXOpen session/UI handles shared by all dialogs; they are
// (re)assigned in every BaseDialog constructor before any other NXOpen call.
Session* BaseDialog::s_theSession = NULL;
UI* BaseDialog::s_theUI = NULL;
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Creates the Block Styler dialog from the given .dlx file and registers the
// standard callbacks on it; derived classes customize behaviour by
// overriding the virtual *Cb methods.
//
// The generated template wrapped the body in
//   try { ... } catch (std::exception&) { throw; }
// which is a no-op (it re-throws the same exception unchanged), so the
// wrapper has been removed; failures from the NXOpen calls still propagate
// to the caller exactly as before.
BaseDialog::BaseDialog(const std::string &dialogName)
{
    // Initialize the NX Open C++ API environment.  These are process-wide
    // singletons, so reassigning the shared statics on each construction is
    // harmless.
    s_theSession = Session::GetSession();
    s_theUI = UI::GetUI();

    m_theDialog.reset(s_theUI->CreateDialog(dialogName.c_str()));

    // Registration of callback functions.
    m_theDialog->AddApplyHandler(make_callback(this, &BaseDialog::ApplyCb));
    m_theDialog->AddOkHandler(make_callback(this, &BaseDialog::OkCb));
    m_theDialog->AddEnableOKButtonHandler(make_callback(this, &BaseDialog::Okay));
    m_theDialog->AddCancelHandler(make_callback(this, &BaseDialog::CancelCb));
    m_theDialog->AddUpdateHandler(make_callback(this, &BaseDialog::UpdateCb));
    m_theDialog->AddFilterHandler(make_callback(this, &BaseDialog::FilterCb));
    m_theDialog->AddInitializeHandler(make_callback(this, &BaseDialog::InitializeCb));
    m_theDialog->AddFocusNotifyHandler(make_callback(this, &BaseDialog::FocusNotifyCb));
    m_theDialog->AddKeyboardFocusNotifyHandler(make_callback(this, &BaseDialog::KeyboardFocusNotifyCb));
    m_theDialog->AddDialogShownHandler(make_callback(this, &BaseDialog::DialogShownCb));
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
// The dialog object is owned by m_theDialog (smart pointer, see the
// constructor), so there is nothing to release explicitly here.
BaseDialog::~BaseDialog()
{
}
// Shows the wrapped Block Styler dialog in the requested mode.
// Exceptions are reported in an NX message box; the function always
// returns 0, matching the generated template's contract.
int BaseDialog::Show(BlockDialog::DialogMode dialogMode)
{
    try
    {
        m_theDialog->Show(dialogMode);
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Default dialog-shown hook: the base class presets nothing; derived
// dialogs override this to seed values just before display.
void BaseDialog::DialogShownCb()
{
    try
    {
        // Intentionally empty.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
// Default apply hook: does nothing and reports success (0).  Derived
// dialogs override this; on exception it reports the error and returns 1.
int BaseDialog::ApplyCb()
{
    int rc = 0;
    try
    {
        // Intentionally empty in the base class.
    }
    catch (std::exception &ex)
    {
        rc = 1;
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return rc;
}
// Default cancel hook: does nothing and reports success (0); on exception
// it reports the error and returns 1.
int BaseDialog::CancelCb()
{
    int rc = 0;
    try
    {
        // Intentionally empty in the base class.
    }
    catch (std::exception &ex)
    {
        rc = 1;
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return rc;
}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Default update hook: ignores the changed block and returns 0.  Derived
// dialogs override this to react to block changes.
int BaseDialog::UpdateCb(UIBlock* block)
{
    try
    {
        // Intentionally empty in the base class.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
//------------------------------------------------------------------------------
//Callback Name: ok_cb
//------------------------------------------------------------------------------
// OK simply delegates to Apply so both buttons share one code path.
// Returns Apply's result; on an unexpected exception it reports the error
// and returns 1.
int BaseDialog::OkCb()
{
    int rc = 0;
    try
    {
        rc = ApplyCb();
    }
    catch (std::exception &ex)
    {
        rc = 1;
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return rc;
}
// Enable-OK-button hook: the OK button is always enabled in the base class.
bool BaseDialog::Okay()
{
return true;
}
// Default selection filter: accepts every candidate object.  Derived
// dialogs override this to veto selections.
int BaseDialog::FilterCb(NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::TaggedObject *pSel)
{
return UF_UI_SEL_ACCEPT;
}
//------------------------------------------------------------------------------
//Callback Name: focusNotify_cb
//This callback is executed when any block (except the ones which receive keyboard entry such as Integer block) receives focus.
//------------------------------------------------------------------------------
// Default focus-notify hook (non-keyboard blocks): no reaction in the
// base class; derived dialogs may override.
void BaseDialog::FocusNotifyCb(UIBlock* block, bool focus)
{
    try
    {
        // Intentionally empty.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: keyboardFocusNotify_cb
//This callback is executed when block which can receive keyboard entry, receives the focus.
//------------------------------------------------------------------------------
// Default focus-notify hook for keyboard-entry blocks: no reaction in the
// base class; derived dialogs may override.
void BaseDialog::KeyboardFocusNotifyCb(UIBlock* block, bool focus)
{
    try
    {
        // Intentionally empty.
    }
    catch (std::exception &ex)
    {
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
}
<file_sep>/src/NXVsar/include/VsarUI_RailSettings.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\RailSettings.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 04-07-2011 (Format: mm-dd-yyyy)
// Time: 22:30
//
//==============================================================================
#ifndef VSARUI_RAILSETTINGS_H_INCLUDED
#define VSARUI_RAILSETTINGS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace VsarUI
{
// Block Styler dialog for editing the rail component's geometry and
// material parameters; each member below caches the handle of one
// Expression block of RailSettings.dlx.
class RailSettings : public BaseCompDialog
{
// class members
public:
RailSettings();
~RailSettings();
// Creates the dialog and shows it modally.
static void ShowDialog();
//----------------------- BlockStyler Callback Prototypes ---------------------
// The following member function prototypes define the callbacks
// specified in your BlockStyler dialog. The empty implementation
// of these prototypes is provided in the RailSettings.cpp file.
// You are REQUIRED to write the implementation for these functions.
//------------------------------------------------------------------------------
virtual void InitializeCb();
virtual void DialogShownCb();
//virtual int ApplyCb();
virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
private:
// Geometry parameters.
//NXOpen::BlockStyler::UIBlock* m_grpGeometry;// Block type: Group
NXOpen::BlockStyler::UIBlock* m_length;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_width;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_eleSize;// Block type: Expression
// Material parameters.
//NXOpen::BlockStyler::UIBlock* m_grpMaterial;// Block type: Group
NXOpen::BlockStyler::UIBlock* m_density;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_elasticModulus;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_poissonRatio;// Block type: Expression
};
}
#endif //VSARUI_RAILSETTINGS_H_INCLUDED
<file_sep>/src/NXVsar/src/vsar/Vsar_Utils.cxx
#include <uf_defs.h>
#include <Vsar_Utils.hxx>
#include <uf.h>
#include <uf_ui.h>
#include <uf_obj.h>
#if 0
#include <uf_assem.h>
#include <boost/shared_array.hpp>
#endif
#include <sstream>
#include <functional>
#include <algorithm>
#include <boost/filesystem.hpp>
#include <boost/scope_exit.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/shared_array.hpp>
#include <boost/bind.hpp>
//#include <boost/lambda/lambda.hpp>
//#include <boost/lambda/bind.hpp>
#include <boost/cast.hpp>
#include <boost/foreach.hpp>
#include <NXOpen/Session.hxx>
#include <NXOpen/NXException.hxx>
#include <NXOpen/PartCollection.hxx>
#include <NXOpen/Part.hxx>
//#include <NXOpen/CAE_FemPart.hxx>
#include <NXOpen/Body.hxx>
#include <NXOpen/Point.hxx>
#include <NXOpen/BodyCollection.hxx>
#include <NXOpen/PointCollection.hxx>
#include <NXOpen/NXObjectManager.hxx>
//#include <NXOpen/UnitCollection.hxx>
//#include <NXOpen/Unit.hxx>
//#include <NXOpen/ExpressionCollection.hxx>
#include <NXOpen/Expression.hxx>
#include <NXOpen/SelectDisplayableObjectList.hxx>
#include <NXOpen/SelectDisplayableObject.hxx>
#include <NXOpen/Assemblies_Component.hxx>
#include <NXOpen/CAE_BaseFEModel.hxx>
#include <NXOpen/CAE_BaseFemPart.hxx>
#include <NXOpen/CAE_CAEBody.hxx>
#include <NXOpen/CAE_CAEFace.hxx>
#include <NXOpen/CAE_MeshManager.hxx>
#include <NXOpen/CAE_SweptMesh.hxx>
#include <NXOpen/CAE_Mesh3dHexBuilder.hxx>
#include <NXOpen/CAE_MeshCollector.hxx>
#include <NXOpen/CAE_FENode.hxx>
#include <NXOpen/CAE_FEElement.hxx>
#include <NXOpen/CAE_ElementTypeBuilder.hxx>
#include <NXOpen/CAE_DestinationCollectorBuilder.hxx>
#include <NXOpen/CAE_PropertyTable.hxx>
#include <NXOpen/CAE_CAEConnectionCollection.hxx>
#include <NXOpen/CAE_CAEConnection.hxx>
#include <NXOpen/CAE_CAEConnectionBuilder.hxx>
#include <NXOpen/CAE_FEModelOccurrence.hxx>
#include <NXOpen/CAE_IMeshManager.hxx>
#include <NXOpen/CAE_SimPart.hxx>
#include <NXOpen/CAE_SimSimulation.hxx>
#include <NXOpen/CAE_SmartSelectionManager.hxx>
#include <NXOpen/CAE_RelatedNodeMethod.hxx>
#include <NXOpen/SelectTaggedObjectList.hxx>
#include <uf_sf.h>
//#include <uf_so.h>
#ifdef DEBUG
#include <boost/lexical_cast.hpp>
#include <NXOpen/ListingWindow.hxx>
#endif
#include <Vsar_Project.hxx>
#include <Vsar_Names.hxx>
using namespace boost;
using namespace NXOpen;
using namespace NXOpen::Assemblies;
using namespace NXOpen::CAE;
using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Translates a nonzero UF (NX Open C) return code into a user-visible
// message and throws the corresponding NXException.
//   file/line/call: the call site, used in the debug report.
//   irc:            the UF return code; 0 means success.
// Returns irc (only reachable when irc == 0, since nonzero throws).
int ReportError(const std::string &file, int line, const std::string &call, int irc)
{
    if (irc)
    {
        char messg[MAX_LINE_SIZE + 1];
        std::stringstream errMsgStream;

        if (UF_get_fail_message(irc, messg))
            errMsgStream << "returned a " << irc << "\n";
        else
            errMsgStream << "returned error " << irc << ": " << messg << "\n";

#if defined(DEBUG) || defined(_DEBUG)
        {
            filesystem::path filePath(file);
            std::string errMsg(errMsgStream.str());

            // BUG FIX: the original called errMsgStream.clear(), which only
            // resets the stream's error-state flags and leaves the buffered
            // text in place, so the message was written twice.  str("")
            // actually empties the buffer; clear() is kept to reset flags.
            errMsgStream.str("");
            errMsgStream.clear();

            errMsgStream << filePath.filename() << ", line " << line << ": " << call << "\n" << errMsg;

            UF_UI_open_listing_window();
            UF_UI_write_listing_window(errMsgStream.str().c_str());
        }
#else
        UF_UI_set_status(const_cast<char*>(errMsgStream.str().c_str()));
#endif
        throw NXException::Create(irc);
    }
    return irc;
}
// Opens the project's root part and loads all of its components.
// Does nothing when no project instance exists.
void LoadRootPart()
{
    Project *pPrj = Project::Instance();
    if (pPrj)
    {
        Session *pSession = Session::GetSession();
        PartCollection *pPrtCol = pSession->Parts();

        BaseProjectProperty *pPrjProp = pPrj->GetProperty();

        // Root part path = <project path>/<root part name>.
        boost::filesystem::path rootPathName(pPrjProp->GetProjectPath());
        rootPathName /= pPrjProp->GetRootPartName();

        PartLoadStatus *pPrtLoadStatus = NULL;
        Part *pPrt = pPrtCol->Open(rootPathName.string().c_str(), &pPrtLoadStatus);

        // BUG FIX: register the cleanup immediately after Open().  The
        // original code declared this scope-exit handler only after
        // LoadFully(), so the load status leaked when LoadFully() threw.
        BOOST_SCOPE_EXIT((&pPrtLoadStatus))
        {
            delete pPrtLoadStatus;
        }
        BOOST_SCOPE_EXIT_END

        pPrt->LoadFully();
    }
}
// Returns every body in pBodyPrt whose name equals bodyName (exact,
// case-sensitive match); the result may be empty.
std::vector<Body*> GetBodyByName( Part * pBodyPrt, const std::string &bodyName )
{
    std::vector<Body*> matches;
    BodyCollection *pBodies = pBodyPrt->Bodies();

    for (BodyCollection::iterator it = pBodies->begin(); it != pBodies->end(); ++it)
    {
        Body *pBody = *it;
        if (bodyName == pBody->Name().GetText())
            matches.push_back(pBody);
    }
    return matches;
}
//------------------------------------------------------------------------------
// Select points that carry an attribute titled attrName (any attribute type).
// Returns the matching points of pPtPrt as TaggedObject pointers.
//------------------------------------------------------------------------------
std::vector<TaggedObject*> GetPointByAttrName( BasePart * pPtPrt, const std::string &attrName )
{
    typedef std::vector<NXObject::AttributeInformation> AttrInfoVector;

    std::vector<TaggedObject*> matches;
    PointCollection *pPoints = pPtPrt->Points();

    for (PointCollection::iterator it = pPoints->begin(); it != pPoints->end(); ++it)
    {
        AttrInfoVector attrs((*it)->GetAttributeTitlesByType(NXObject::AttributeTypeNull));

        // A point qualifies as soon as one attribute title matches.
        for (AttrInfoVector::size_type pos = 0; pos < attrs.size(); ++pos)
        {
            if (attrName == attrs[pos].Title.GetText())
            {
                matches.push_back(*it);
                break;
            }
        }
    }
    return matches;
}
//------------------------------------------------------------------------------
// Select all points of pPtPrt that lie on the given layer.
//------------------------------------------------------------------------------
std::vector<TaggedObject*> GetPointByLayer( BasePart * pPtPrt, int layer )
{
    std::vector<TaggedObject*> onLayer;
    PointCollection *pPoints = pPtPrt->Points();

    PointCollection::iterator it = pPoints->begin();
    for (; it != pPoints->end(); ++it)
    {
        if ((*it)->Layer() == layer)
            onLayer.push_back(*it);
    }
    return onLayer;
}
//------------------------------------------------------------------------------
// Recursively search the component tree rooted at pAssemTree for every
// component occurrence whose prototype is pPrt.
//------------------------------------------------------------------------------
std::vector<Assemblies::Component*> GetOccInCompTree(Assemblies::Component *pAssemTree, Part *pPrt)
{
    std::vector<Assemblies::Component*> found;

    if (!pAssemTree || !pPrt)
        return found;

    // NOTE: There's problem with this API: Component::GetChildren()
    std::vector<Assemblies::Component*> children(pAssemTree->GetChildren());

    for (std::vector<Assemblies::Component*>::size_type pos = 0; pos < children.size(); ++pos)
    {
        Component *pChild = children[pos];
        if (pChild && pChild->Prototype() == pPrt)
        {
            found.push_back(pChild);
        }
        else
        {
            // Not a match: descend into this subtree and merge its hits.
            std::vector<Assemblies::Component*> subFound(GetOccInCompTree(pChild, pPrt));
            found.insert(found.end(), subFound.begin(), subFound.end());
        }
    }
    return found;
}
//------------------------------------------------------------------------------
// Placeholder for editing an existing swept mesh so that it spans srcBodies.
// The entire implementation is commented out; currently this routine only
// validates pFeModel and returns without any side effects.
// NOTE(review): meshName and srcBodies are unused while the body is disabled
// -- confirm whether this is still needed or should be removed.
//------------------------------------------------------------------------------
void EditSweptMeshData(IFEModel *pFeModel, const std::string &meshName,
const std::vector<Body*> &srcBodies)
{
if (!pFeModel)
return;
//MeshManager *pMeshMgr = polymorphic_cast<MeshManager*>(pFeModel->MeshManager());
//SweptMesh *pMesh3d = polymorphic_cast<SweptMesh*>(pMeshMgr->FindObject(meshName.c_str()));
//boost::shared_ptr<Mesh3dHexBuilder> pMesh3dBuilder(pMeshMgr->CreateMesh3dHexBuilder(pMesh3d),
//    boost::bind(&Builder::Destroy, _1));
//std::vector<DisplayableObject*> meshBody(GetCaeBodies(srcBodies));
//logical success = pMesh3dBuilder->SelectionList()->Add(meshBody);
//std::vector<Mesh *> pvMesh = pMesh3dBuilder->CommitMesh();
}
//------------------------------------------------------------------------------
// Create a swept solid hex mesh (CHEXA(8)) from pSrcFace to pTargetFace and
// place the generated elements into pMeshCol.  Every mesh produced by the
// commit is renamed to meshName; eleSizeExpName becomes the right-hand side of
// the recipe's "source element size" expression.
//------------------------------------------------------------------------------
void CreateSweptMesh( MeshManager *pMeshMgr, MeshCollector *pMeshCol, const std::string &meshName,
CAEFace* pSrcFace, CAEFace *pTargetFace, const std::string &eleSizeExpName)
{
Session *pSession = Session::GetSession();
// The shared_ptr deleter destroys the builder even if something below throws.
boost::shared_ptr<Mesh3dHexBuilder> pMesh3dHexBuilder(pMeshMgr->CreateMesh3dHexBuilder(NULL),
boost::bind(&Builder::Destroy, _1));
ElementTypeBuilder *pEleTypeBuilder = pMesh3dHexBuilder->ElementType();
pEleTypeBuilder->SetElementDimension(CAE::ElementTypeBuilder::ElementTypeSweepSolid);
pEleTypeBuilder->SetElementTypeName("CHEXA(8)");
// Route new elements into the caller-supplied collector rather than letting
// NX choose one automatically.
DestinationCollectorBuilder *pDstColBulder = pEleTypeBuilder->DestinationCollector();
pDstColBulder->SetElementContainer(pMeshCol);
pDstColBulder->SetAutomaticMode(false);
pMesh3dHexBuilder->SetCreationType(Mesh3dHexBuilder::TypeManual);
bool bSrcFaceAdded = pMesh3dHexBuilder->SourceFaceList()->Add(pSrcFace);
pMesh3dHexBuilder->TargetFace()->SetValue(pTargetFace);
// Meshing recipe options for the sweep.
PropertyTable *pPropTable = pMesh3dHexBuilder->PropertyTable();
pPropTable->SetBooleanPropertyValue("mapped mesh option bool", false);
pPropTable->SetIntegerPropertyValue("quad only option", 1);
pPropTable->SetBooleanPropertyValue("project vertices option", false);
pPropTable->SetBooleanPropertyValue("target face smoothing option", false);
// set element size
Expression *pEleSizeExp = pPropTable->GetScalarPropertyValue("source element size");
pEleSizeExp->SetRightHandSide(eleSizeExpName);
// pPropTable->SetScalarPropertyValue("source element size", pEleSizeExp);
// Run the update under an invisible undo mark so the operation does not show
// up in the user's undo list; the mark is deleted again at the end.
Session::UndoMarkId undoMark = pSession->SetUndoMark(Session::MarkVisibilityInvisible, "Create Swept Mesh");
int nErrs = pSession->UpdateManager()->DoUpdate(undoMark);
#ifdef DEBUG
// Debug builds: dump any update errors to the listing window.
if (nErrs > 0)
{
ListingWindow *pLstWnd = pSession->ListingWindow();
pLstWnd->Open();
pLstWnd->WriteLine("\n");
ErrorList *pErrLst = pSession->UpdateManager()->ErrorList();
for (int idx = 0; idx < nErrs; idx++)
{
pLstWnd->WriteLine((lexical_cast<std::string>(pErrLst->GetErrorInfo(idx)->ErrorCode())).c_str());
pLstWnd->WriteLine(pErrLst->GetErrorInfo(idx)->Description());
pLstWnd->WriteLine(pErrLst->GetErrorInfo(idx)->ErrorObjectDescription());
pLstWnd->WriteLine("\n");
}
pErrLst->Clear();
}
#endif
std::vector<CAE::Mesh*> createdMeshes(pMesh3dHexBuilder->CommitMesh());
// Name every mesh produced by the commit.
std::for_each(createdMeshes.begin(), createdMeshes.end(),
boost::bind(&NXObject::SetName, _1, meshName.c_str()));
pSession->DeleteUndoMark(undoMark, NULL);
}
//------------------------------------------------------------------------------
// Create a swept hex mesh through the UF_SF C API (an alternative to the
// builder-based CreateSweptMesh above).  Only the UF call is active; the block
// under "#if 0", which would rename the mesh and move it into the named
// collector, is disabled.
// NOTE(review): pFeModel, meshColName and meshName are unused while that block
// is disabled -- confirm whether it should be re-enabled or deleted.
//------------------------------------------------------------------------------
void CreateSweptMesh_sf(IFEModel *pFeModel, CAEBody *pSolidBody,
const std::string &meshColName, const std::string &meshName,
CAEFace* pSrcFace, Expression *pEleSize)
{
tag_t tSweptMesh;
int err = UF_CALL(UF_SF_create_swept_hex_mesh(pSolidBody->Tag(), pSrcFace->Tag(), true,
pEleSize->Value(), &tSweptMesh));
#if 0
if (err == 0)
{
MeshManager *pMeshMgr = polymorphic_cast<MeshManager*>(pFeModel->MeshManager());
SweptMesh *pMesh = dynamic_cast<SweptMesh*>(NXObjectManager::Get(tSweptMesh));
pMesh->SetName(meshName.c_str());
std::string srcMeshColName = std::string("MeshCollector[").append(meshColName).append("]");
MeshCollector *pDstMeshCol(polymorphic_cast<MeshCollector*>(pMeshMgr->FindObject(srcMeshColName.c_str())));
boost::shared_ptr<Mesh3dHexBuilder> pMesh3dHexBuilder(pMeshMgr->CreateMesh3dHexBuilder(pMesh),
boost::bind(&Builder::Destroy, _1));
ElementTypeBuilder *pEleTypeBuilder = pMesh3dHexBuilder->ElementType();
DestinationCollectorBuilder *pSrcColBulder = pEleTypeBuilder->DestinationCollector();
//pMeshMgr->DragNDropMesh(pMesh, pSrcColBulder->ElementContainer(), pDstMeshCol);
PropertyTable *pPropTable = pMesh3dHexBuilder->PropertyTable();
// set element size
Expression *pEleSizeExp = pPropTable->GetScalarPropertyValue("source element size");
pEleSizeExp->SetRightHandSide(pEleSize->RightHandSide());
pMesh3dHexBuilder->CommitMesh();
}
#endif
}
//------------------------------------------------------------------------------
// Create or update a point-to-point 1D connection (CBUSH elements) between the
// rail connection points and the slab connection points.
//
// connName    - name of the CAE connection; if a connection with this name
//               already exists it is edited, otherwise a new one is created.
// connColName - name used to look up the mesh collector that receives the
//               generated elements (via FIND_MESH_COL_PATTERN_NAME).
// meshName    - name given to the resulting mesh when the collector holds
//               exactly one mesh after the commit.
//------------------------------------------------------------------------------
void Update1DConnection(BaseFEModel *pFeModel,
const std::vector<TaggedObject*> &railConnectPts,
const std::vector<TaggedObject*> &slabConnectPts,
const std::string &connName,
const std::string &connColName,
const std::string &meshName)
{
CAEConnectionCollection *pCaeConnCol = pFeModel->CaeConnections();
CAEConnection *pCaeConn = NULL;
//bool createMode = false;
// FindObject throws when the connection does not exist yet; a NULL pCaeConn
// makes CreateConnectionBuilder operate in create mode.
try
{
pCaeConn = pCaeConnCol->FindObject(std::string("Connection[") + connName + "]");
}
catch (NXException&)
{
pCaeConn = NULL;
//createMode = true;
}
// The shared_ptr deleter destroys the builder even on exceptions.
boost::shared_ptr<CAEConnectionBuilder> pCaeConnBuilder(pCaeConnCol->CreateConnectionBuilder(pCaeConn), boost::bind(&Builder::Destroy, _1));
pCaeConnBuilder->SourceSelection()->Add(railConnectPts);
pCaeConnBuilder->TargetSelection()->Add(slabConnectPts);
// Route the generated elements into the collector named after connColName.
MeshManager *pMeshMgr = polymorphic_cast<MeshManager*>(pFeModel->MeshManager());
std::string meshColFullName((boost::format(FIND_MESH_COL_PATTERN_NAME) % connColName).str());
MeshCollector *pMeshCol = polymorphic_cast<MeshCollector*>(pMeshMgr->FindObject(meshColFullName.c_str()));
DestinationCollectorBuilder *pDstCol = pCaeConnBuilder->ElementType()->DestinationCollector();
pDstCol->SetAutomaticMode(false);
pDstCol->SetElementContainer(pMeshCol);
pCaeConnBuilder->ElementType()->SetElementTypeName("CBUSH");
pCaeConnBuilder->SetType(CAEConnectionBuilder::ConnectionTypeEnumPointToPoint);
pCaeConnBuilder->SetMethodType(CAEConnectionBuilder::MethodTypeEnumProximity);
pCaeConn = dynamic_cast<CAEConnection*>(pCaeConnBuilder->Commit());
pCaeConn->SetName(connName);
// If the commit produced exactly one mesh in the collector, name it too.
std::vector<Mesh*> pMeshes(pMeshCol->GetMeshes());
if (pMeshes.size() == 1)
{
pMeshes.back()->SetName(meshName);
}
}
//------------------------------------------------------------------------------
// Map each modeling body to its corresponding CAE body (UF_SF lookup).
// The result is parallel to srcBodies.
//------------------------------------------------------------------------------
std::vector<CAEBody*> GetCaeBodies(const std::vector<Body*> &srcBodies)
{
    std::vector<CAEBody*> caeBodies;
    caeBodies.reserve(srcBodies.size());

    BOOST_FOREACH(Body *pSrcBody, srcBodies)
    {
        tag_t tCaeBody = NULL_TAG;
        UF_SF_modl_body_ask_body(pSrcBody->Tag(), &tCaeBody);
        caeBodies.push_back(dynamic_cast<CAEBody*>(NXObjectManager::Get(tCaeBody)));
    }
    return caeBodies;
}
//------------------------------------------------------------------------------
// Collect every CAE face of pCaeBody whose name equals faceName.  Faces with a
// null name are treated as having an empty name.
//------------------------------------------------------------------------------
std::vector<CAEFace*> GetCaeFaceByName(CAEBody *pCaeBody, const std::string &faceName)
{
    std::vector<CAEFace*> caeFaces;
    int numFaces;
    tag_t *tFaces = NULL;
    UF_SF_body_ask_faces(pCaeBody->Tag(), &numFaces, &tFaces);
    // UF_free releases the tag array when the shared_array goes out of scope.
    boost::shared_array<tag_t> tFacesArray(tFaces, UF_free);

    for (int idx = 0; idx < numFaces; idx++)
    {
        CAEFace *pCaeFace = dynamic_cast<CAEFace*>(NXObjectManager::Get(tFaces[idx]));

        // BUG FIX: the original called pCaeFace->Name() BEFORE the null check,
        // dereferencing a null pointer whenever the dynamic_cast failed.
        if (!pCaeFace)
            continue;

        const char *strName = pCaeFace->Name().GetText();
        if (faceName.compare(strName ? strName : "") == 0)
        {
            caeFaces.push_back(pCaeFace);
        }
    }
    return caeFaces;
}
//------------------------------------------------------------------------------
// Enumerate all objects of the given UF type in pPrt that are named geomName
// and return those that cast to GeomClass.
//------------------------------------------------------------------------------
template<class GeomClass>
static std::vector<GeomClass*> GetCaeGeomByName(CaePart *pPrt, const std::string &geomName, int geomType)
{
    std::vector<GeomClass*> found;
    tag_t tGeom = NULL_TAG;

    // The cycle call walks matching objects one at a time and hands back
    // NULL_TAG once the enumeration is exhausted.
    for (UF_OBJ_cycle_by_name_and_type(pPrt->Tag(), geomName.c_str(), geomType, true, &tGeom);
         tGeom != NULL_TAG;
         UF_OBJ_cycle_by_name_and_type(pPrt->Tag(), geomName.c_str(), geomType, true, &tGeom))
    {
        GeomClass *pGeom = dynamic_cast<GeomClass*>(NXObjectManager::Get(tGeom));
        if (pGeom)
        {
            found.push_back(pGeom);
        }
    }
    return found;
}
//------------------------------------------------------------------------------
// Find all CAE bodies in pPrt named bodyName.  Thin wrapper over the
// GetCaeGeomByName template using the CAE geometry object type.
//------------------------------------------------------------------------------
std::vector<CAEBody*> GetCaeBodyByName(CaePart *pPrt, const std::string &bodyName)
{
return GetCaeGeomByName<CAEBody>(pPrt, bodyName, UF_caegeom_type);
}
//------------------------------------------------------------------------------
// Find all CAE faces in pPrt named faceName.  Thin wrapper over the
// GetCaeGeomByName template using the CAE geometry object type.
//------------------------------------------------------------------------------
std::vector<CAEFace*> GetCaeFaceByName(CaePart *pPrt, const std::string &faceName)
{
return GetCaeGeomByName<CAEFace>(pPrt, faceName, UF_caegeom_type);
}
//------------------------------------------------------------------------------
// For every CAE body in pPrt named bodyName, collect its faces named faceName
// and return them all in one vector.
//------------------------------------------------------------------------------
std::vector<CAEFace*> GetCaeFacesOfBodyByName(CaePart *pPrt, const std::string &bodyName, const std::string &faceName)
{
    std::vector<CAEFace*> allFaces;
    std::vector<CAEBody*> bodies(GetCaeBodyByName(pPrt, bodyName));

    BOOST_FOREACH(CAEBody *pBody, bodies)
    {
        std::vector<CAEFace*> faces(GetCaeFaceByName(pBody, faceName));
        allFaces.insert(allFaces.end(), faces.begin(), faces.end());
    }
    return allFaces;
}
//------------------------------------------------------------------------------
// Find the child FE-model occurrence that owns a mesh named meshName
// (looked up via FIND_MESH_OCC_PATTERN_NAME).  Returns NULL when no child
// occurrence contains such a mesh.
//------------------------------------------------------------------------------
FEModelOccurrence* GetFEModelOccByMeshName(IHierarchicalFEModel *pHieFeModel, const std::string &meshName)
{
    std::string findName((boost::format(FIND_MESH_OCC_PATTERN_NAME) % meshName).str());
    std::vector<FEModelOccurrence*> occs(pHieFeModel->GetChildren());

    for (std::vector<FEModelOccurrence*>::size_type pos = 0; pos < occs.size(); ++pos)
    {
        IMeshManager *pMeshMgr = occs[pos]->MeshManager();
        try
        {
            // FindObject throws when the mesh is absent, and polymorphic_cast
            // throws on a type mismatch; either way this occurrence is skipped.
            polymorphic_cast<Mesh*>(pMeshMgr->FindObject(findName.c_str()));
            return occs[pos];
        }
        catch (std::exception&)
        {
        }
    }
    return NULL;
}
//------------------------------------------------------------------------------
// Look up a mesh by name.  meshNamePattern is a boost::format pattern that is
// expanded with meshName to form the FindObject journal identifier.
// Throws if the object is missing or is not a Mesh.
//------------------------------------------------------------------------------
Mesh* GetMeshByName(IFEModel *pFEModel, const std::string &meshNamePattern, const std::string &meshName)
{
    const std::string findName((boost::format(meshNamePattern) % meshName).str());
    return polymorphic_cast<Mesh*>(pFEModel->MeshManager()->FindObject(findName.c_str()));
}
//------------------------------------------------------------------------------
// Delete every mesh contained in the mesh collector named meshColName by
// queueing them on the update manager's delete list and running an update.
//------------------------------------------------------------------------------
void DeleteMeshesInCollector(IFEModel *pFeModel, const std::string &meshColName)
{
    std::vector<Mesh*> meshes(GetMeshesInCollector(pFeModel, FIND_MESH_COL_PATTERN_NAME, meshColName));
    if (meshes.empty())
        return;

    Session *pSession = Session::GetSession();
    Update *pUpdateMgr = pSession->UpdateManager();

    for (std::vector<Mesh*>::iterator it = meshes.begin(); it != meshes.end(); ++it)
    {
        pUpdateMgr->AddToDeleteList(*it);
    }
    pUpdateMgr->DoUpdate(pSession->GetNewestUndoMark(Session::MarkVisibilityAnyVisibility));
}
//------------------------------------------------------------------------------
// Return the meshes that belong to the collector named meshColName.
// meshNamePattern is expanded with meshColName to locate the collector;
// the lookup throws when the collector is absent.
//------------------------------------------------------------------------------
std::vector<Mesh*> GetMeshesInCollector(IFEModel *pFeModel, const std::string &meshNamePattern, const std::string &meshColName)
{
    IMeshManager *pMeshMgr = pFeModel->MeshManager();
    std::string colFullName((boost::format(meshNamePattern) % meshColName).str());
    IMeshCollector *pMeshCol = polymorphic_cast<IMeshCollector*>(pMeshMgr->FindObject(colFullName.c_str()));

    // Keep only the meshes whose collector is the one we looked up.
    std::vector<Mesh*> inCollector;
    std::vector<Mesh*> allMeshes(pMeshMgr->GetMeshes());
    BOOST_FOREACH(Mesh *pMesh, allMeshes)
    {
        if (pMesh->MeshCollector() == pMeshCol)
            inCollector.push_back(pMesh);
    }
    return inCollector;
}
//------------------------------------------------------------------------------
// Gather the FE nodes related to each of the given CAE faces via the smart
// selection manager's related-node recipe.
//------------------------------------------------------------------------------
std::vector<FENode*> GetNodesOnFace(CaePart *pCaePrt, const std::vector<CAEFace*> &pFaces)
{
    std::vector<FENode*> allNodes;
    SmartSelectionManager *pSelMgr = pCaePrt->SmartSelectionMgr();

    BOOST_FOREACH(CAEFace *pFace, pFaces)
    {
        boost::scoped_ptr<RelatedNodeMethod> pMethod(pSelMgr->CreateRelatedNodeMethod(pFace));
        std::vector<FENode*> faceNodes(pMethod->GetNodes());
        allNodes.insert(allNodes.end(), faceNodes.begin(), faceNodes.end());
    }
    return allNodes;
}
//------------------------------------------------------------------------------
// Map prototype FE nodes to their occurrences in pFeModelOcc.  An occurrence
// node's label is the prototype label shifted by nodeOffset; prototypes with
// no matching occurrence are skipped.
//------------------------------------------------------------------------------
std::vector<FENode*> GetNodeOcc(FEModelOccurrence *pFeModelOcc, int nodeOffset, const std::vector<FENode*> &nodeProtos)
{
    boost::scoped_ptr<FENodeLabelMap> pLabelMap(pFeModelOcc->FenodeLabelMap());

    std::vector<FENode*> occs;
    occs.reserve(nodeProtos.size());

    for (std::vector<FENode*>::const_iterator it = nodeProtos.begin(); it != nodeProtos.end(); ++it)
    {
        FENode *pOcc = pLabelMap->GetNode(nodeOffset + (*it)->Label());
        if (pOcc)
            occs.push_back(pOcc);
    }
    return occs;
}
//------------------------------------------------------------------------------
// Return the node label offset of an FE-model occurrence (the element and
// coordinate-system offsets are queried too but discarded).
//------------------------------------------------------------------------------
int GetNodeOffset(FEModelOccurrence *pFeModelOcc)
{
    int offsets[3] = {0, 0, 0};   // node, element, csys
    pFeModelOcc->GetLabelOffsets(&offsets[0], &offsets[1], &offsets[2]);
    return offsets[0];
}
//------------------------------------------------------------------------------
// Return the first mesh associated with the given object, or NULL when pObj is
// null or no mesh is found.
//------------------------------------------------------------------------------
Mesh* GetMesh(TaggedObject *pObj)
{
    if (!pObj)
        return NULL;

    int meshCnt = 0;
    tag_p_t tMeshes = NULL;
    // NOTE: this method will switch display part, cause updating problem.
    UF_SF_find_mesh(pObj->Tag(), UF_SF_DIMENSION_ANY, &meshCnt, &tMeshes);
    // UF_free releases the tag array when the shared_array goes out of scope.
    boost::shared_array<tag_t> tMeshArray(tMeshes, UF_free);

    return (meshCnt > 0) ? dynamic_cast<Mesh*>(NXObjectManager::Get(tMeshes[0])) : NULL;
}
//------------------------------------------------------------------------------
// Find the FE-model occurrence under pParentFEModel that contains the given
// prototype node, by first identifying the node's mesh (through any of its
// elements) and then searching occurrences for a mesh of that name.
// Returns NULL when the node is null or its mesh cannot be determined.
//------------------------------------------------------------------------------
FEModelOccurrence* GetFEModelOccOfNode(FEModelOccurrence *pParentFEModel, FENode *pNodeProto)
{
    if (!pNodeProto)
        return NULL;

    // Any element of the node identifies the mesh it belongs to.
    std::vector<FEElement*> elems(pNodeProto->GetElements());
    Mesh *pMesh = elems.empty() ? NULL : elems.back()->Mesh();
    if (!pMesh)
        return NULL;

    std::string meshName(pMesh->Name().GetText());
    return GetFEModelOccByMeshName(pParentFEModel, meshName);
}
//------------------------------------------------------------------------------
// Return the full NX version string read from the UGII_FULL_VERSION
// environment variable (e.g. "v7.5.2.5").
// NOTE(review): the UF_ask_system_info / UF_free_system_info pair below is
// called but its result is discarded -- presumably leftover from an earlier
// approach; confirm before removing.
//------------------------------------------------------------------------------
std::string GetNXVersion()
{
Session *pSession = Session::GetSession();
std::string strVer(pSession->GetEnvironmentVariableValue("UGII_FULL_VERSION").GetText());
// Related environment variables for reference:
// UGII_PRODUCT_NAME NX
// UGII_VERSION v7.5
// UGII_FULL_VERSION v7.5.2.5
// UGII_MAJOR_VERSION 7
// UGII_MINOR_VERSION 5
// UGII_SUBMINOR_VERSION 2
UF_system_info_t pInfo;
UF_ask_system_info(&pInfo);
UF_free_system_info(&pInfo);
return strVer;
}
// Dead code: an earlier UF-C-API variant of GetOccInCompTree, kept for
// reference but disabled with "#if 0".  The active implementation using
// Component::GetChildren() appears above.
#if 0
std::vector<Assemblies::Component*> GetOccInCompTree(Assemblies::Component *pAssemTree, Part *pPrt)
{
std::vector<Assemblies::Component*> prtOccs;
if (pAssemTree && pPrt)
{
// NOTE: There's problem with this API: Component::GetChildren()
//std::vector<Assemblies::Component*> childComps(pAssemTree->GetChildren());
tag_t *tChildOccs = NULL;
boost::shared_array<tag_t> childOccArray(tChildOccs, UF_free);
int tChildCnt = UF_ASSEM_ask_part_occ_children(pAssemTree->Tag(), &tChildOccs);
for (int idx = 0; idx < tChildCnt; idx++)
{
Component *pChildOcc = dynamic_cast<Component*>(NXObjectManager::Get(tChildOccs[idx]));
if (pChildOcc && pChildOcc->Prototype() == pPrt)
prtOccs.push_back(pChildOcc);
else
{
std::vector<Assemblies::Component*> childOccComps(GetOccInCompTree(pChildOcc, pPrt));
prtOccs.insert(prtOccs.end(), childOccComps.begin(), childOccComps.end());
}
}
}
return prtOccs;
}
#endif
}
<file_sep>/src/NXVsar/include/VsarUI_SlabSettings.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\CAE\Response\VSAR-DEV\NXProject\AppRoot\application\SlabSettings.hpp
//
// This file was generated by the NX Block Styler
// Created by: Joseph
// Version: NX 7.5
// Date: 05-08-2011 (Format: mm-dd-yyyy)
// Time: 14:00
//
//==============================================================================
#ifndef VSARUI_SLABSETTINGS_H_INCLUDED
#define VSARUI_SLABSETTINGS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace VsarUI
{
//------------------------------------------------------------------------------
// Block-Styler dialog for editing the slab settings of a VSAR project
// (geometry, support count and material/fastener parameters).  Derives the
// common dialog plumbing from BaseCompDialog.
//------------------------------------------------------------------------------
class SlabSettings : public BaseCompDialog
{
// class members
public:
SlabSettings();
~SlabSettings();
// Create and show the dialog (entry point used by the menu dispatcher).
static void ShowDialog();
//----------------------- BlockStyler Callback Prototypes ---------------------
// The following member function prototypes define the callbacks
// specified in your BlockStyler dialog. The empty implementation
// of these prototypes is provided in the SlabSettings.cpp file.
// You are REQUIRED to write the implementation for these functions.
//------------------------------------------------------------------------------
virtual void InitializeCb();
virtual void DialogShownCb();
//int ApplyCb();
virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
private:
// UI block handles, bound to the dialog controls in InitializeCb().
//NXOpen::BlockStyler::UIBlock* grpGeometry;// Block type: Group
NXOpen::BlockStyler::UIBlock* m_length;// Block type: Expression
//NXOpen::BlockStyler::UIBlock* m_eleSize;// Block type: Expression
//NXOpen::BlockStyler::UIBlock* counts;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_supportCnt;// Block type: Enumeration
//NXOpen::BlockStyler::UIBlock* grpMaterial;// Block type: Group
NXOpen::BlockStyler::UIBlock* m_fastenerStiffness;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_fastenerDamping;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_massRatio;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_stiffnessRatio;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_dampingRatio;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_elasticModulus;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_poissonRatio;// Block type: Expression
};
}
#endif //VSARUI_SLABSETTINGS_H_INCLUDED
<file_sep>/src/VsarInit/include/Vsar_Project.hxx
#ifndef VSAR_PROJECT_H_INCLUDED
#define VSAR_PROJECT_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <string>
#include <vector>
#include <boost/format.hpp>
#include <boost/scoped_ptr.hpp>
#include <boost/filesystem.hpp>
#include <NXOpen/MenuBar_MenuButton.hxx>
#include <Vsar_Init_Names.hxx>
#include <libvsarinit_exports.h>
//------------------------------------------------------------------------------
// Namespaces needed for following template
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace NXOpen
{
class BasePart;
class Part;
namespace CAE
{
class SimPart;
class AssyFemPart;
class FemPart;
}
}
#pragma warning(push)
#pragma warning(disable:4251) // get rid of dll import and export warning
namespace Vsar
{
class BaseProjectProperty;
//------------------------------------------------------------------------------
// Tracks the lifecycle state of the active VSAR project and drives menu-item
// sensitivity accordingly.  The states are bit flags so a menu item can be
// enabled for several states at once (see MenuItemSensitivity::m_status).
//------------------------------------------------------------------------------
class VSARINITEXPORT Status
{
public:
// Project lifecycle states, encoded as bit flags.
enum ProjectStatus
{
ProjectStatus_None = 1 << 1,
ProjectStatus_Defined = 1 << 2,
ProjectStatus_ResponseSolved = 1 << 3,
ProjectStatus_ResponseNoiseSolved = 1 << 4,
ProjectStatus_NoiseSolved = 1 << 5
};
// Binds a menu button name to the bitmask of states in which it is enabled.
typedef struct
{
std::string m_buttonName;
//NXOpen::MenuBar::MenuButton::SensitivityStatus m_sensStatus;
unsigned int m_status;
}MenuItemSensitivity;
public:
Status();
~Status();
// Move the project into the given state and update menu sensitivities.
void Switch(ProjectStatus status);
protected:
void SetMenuItemSensitivity(const MenuItemSensitivity &menuItemData, ProjectStatus status);
};
//------------------------------------------------------------------------------
// Singleton representing the active VSAR project.  Created via New() (fresh
// project on disk) or Open() (attaching to an opened part); torn down via
// Close().  Holds the project's property object and the shared Status.
//------------------------------------------------------------------------------
class VSARINITEXPORT Project
{
    // class members
public:
    // Kind of structure the project analyses.
    typedef enum ProjectType
    {
        ProjectType_Bridge,
        ProjectType_Selmi_Infinite,
        ProjectType_Tunnel
    }ProjectType;

    ~Project();

    // Create a new project on disk and make it the active instance.
    static void New(const std::string &prjName, const std::string &prjPath, ProjectType prjType);
    // Attach to / detach from a project when its part is opened or closed.
    static void Open(NXOpen::BasePart* pOpenedPrt);
    static void Close(NXOpen::BasePart* pOpenedPrt);

    // Singleton accessor for the active project.
    static Project* Instance();

    static Status* GetStatus()
    {
        return &m_prjStatus;
    }

    BaseProjectProperty* GetProperty() const
    {
        return m_prjProperty.get();
    }

protected:
    Project(const std::string &prjName, const std::string &prjPath, ProjectType prjType);
    Project(NXOpen::CAE::SimPart* pSimPrt, ProjectType prjType);

    void Initilize(const std::string &prjName, const std::string &prjPath);

    void LoadPrtPart(const std::string &prjPath);
    void LoadSimPart(const std::string &prjName, const std::string &prjPath);

    void SaveAsComp( NXOpen::BasePart * pPrt,
        const boost::filesystem::path &toFilePathName);

    void SetProjectAttribute(const std::string &prjName);

    // BUG FIX: a member declared inside its own class must not use a
    // qualified-id ("Project::") -- that is ill-formed standard C++, even
    // though MSVC historically accepted it as an extension.
    static ProjectType GetProjectTypeOfPart(NXOpen::BasePart* pOpenedPrt);
    static Status::ProjectStatus GetProjectStatusOfPart(NXOpen::BasePart* pOpenedPrt);

private:
    boost::scoped_ptr<BaseProjectProperty> m_prjProperty;

    static boost::scoped_ptr<Project> m_prjInstance;
    static Status m_prjStatus;
};
//------------------------------------------------------------------------------
// Base class describing a VSAR project: its sim part, on-disk layout and the
// names of the result files derived from the project name.  Concrete project
// types (bridge / Selmi-infinite / tunnel) specialize the pure virtuals below
// to supply their template-part and menu names.
//------------------------------------------------------------------------------
class VSARINITEXPORT BaseProjectProperty
{
public:
friend class Project;
BaseProjectProperty(NXOpen::CAE::SimPart* pSimPrt = NULL);
virtual ~BaseProjectProperty();
std::string GetProjectName() const;
std::string GetProjectPath() const;
// Result file names are derived from the project name via the pattern
// constants declared in Vsar_Init_Names.hxx.
std::string GetResponseOp2ResultName() const
{
return (boost::format(RESPONSE_OP2_RESULT_FILE_PATTERN_NAME) %
GetProjectName() % VSDANE_SOLUTION_NAME).str();
}
std::string GetResponseAfuResultName() const
{
return (boost::format(RESPONSE_AFU_RESULT_FILE_PATTERN_NAME) %
GetProjectName() % VSDANE_SOLUTION_NAME).str();
}
std::string GetNoiseIntermediateResultName() const
{
return (boost::format(NOISE_INTERMEDIATE_RESULT_FILE_PATTERN_NAME) % GetProjectName()).str();
}
std::string GetNoiseResultName() const
{
return (boost::format(NOISE_RESULT_FILE_PATTERN_NAME) % GetProjectName()).str();
}
NXOpen::CAE::SimPart* GetSimPart() const
{
return m_simPrt;
}
// Accessors for the component parts that make up the project assembly.
NXOpen::CAE::AssyFemPart* GetAFemPart() const;
NXOpen::CAE::FemPart* GetRailSlabFemPart() const;
NXOpen::Part* GetRailSlabIdeaPart() const;
NXOpen::CAE::FemPart* GetBraceFemPart() const;
NXOpen::Part* GetBraceIdeaPart() const;
NXOpen::Part* GetGeometryPart() const;
virtual int GetTemplatePartFiles(std::vector<std::string> &fileNames) const;
// Project-type specific names, supplied by the concrete subclasses.
virtual std::string GetTemplateBaseName() const = 0;
virtual Project::ProjectType GetProjectType() const = 0;
virtual std::string GetProjectTypeName() const = 0;
virtual std::string GetBraceTemplateBaseName() const = 0;
virtual std::string GetBraceMenuItemName() const = 0;
std::string GetTemplateSimName() const
{
return GetTemplateBaseName() + "_s.sim";
}
std::string GetRootPartName() const;
protected:
void SetSimPart(NXOpen::CAE::SimPart* pSimPrt)
{
m_simPrt = pSimPrt;
}
NXOpen::BasePart* GetCompPart(const std::string &prtName) const;
NXOpen::BasePart* OpenCompPart( const std::string & prtName ) const;
private:
// The sim part this project is bound to; NULL until SetSimPart is called.
NXOpen::CAE::SimPart *m_simPrt;
};
//------------------------------------------------------------------------------
// Project property specialization for bridge projects.
// NOTE(review): unlike the base class this one carries no VSARINITEXPORT
// macro -- confirm whether it is intentionally library-internal.
//------------------------------------------------------------------------------
class BridgeProperty : public BaseProjectProperty
{
public:
BridgeProperty(NXOpen::CAE::SimPart* pSimPrt = NULL);
virtual ~BridgeProperty();
virtual std::string GetTemplateBaseName() const;
virtual Project::ProjectType GetProjectType() const;
virtual std::string GetProjectTypeName() const;
virtual std::string GetBraceTemplateBaseName() const;
virtual std::string GetBraceMenuItemName() const;
};
//------------------------------------------------------------------------------
// Project property specialization for Selmi-infinite-base projects.
//------------------------------------------------------------------------------
class SelmiInfiniteBaseProperty : public BaseProjectProperty
{
public:
SelmiInfiniteBaseProperty(NXOpen::CAE::SimPart* pSimPrt = NULL);
virtual ~SelmiInfiniteBaseProperty();
virtual std::string GetTemplateBaseName() const;
virtual Project::ProjectType GetProjectType() const;
virtual std::string GetProjectTypeName() const;
virtual std::string GetBraceTemplateBaseName() const;
virtual std::string GetBraceMenuItemName() const;
};
//------------------------------------------------------------------------------
// Project property specialization for tunnel projects.
//------------------------------------------------------------------------------
class TunnelProperty : public BaseProjectProperty
{
public:
TunnelProperty(NXOpen::CAE::SimPart* pSimPrt = NULL);
virtual ~TunnelProperty();
virtual std::string GetTemplateBaseName() const;
virtual Project::ProjectType GetProjectType() const;
virtual std::string GetProjectTypeName() const;
virtual std::string GetBraceTemplateBaseName() const;
virtual std::string GetBraceMenuItemName() const;
};
}
#pragma warning(pop)
#endif //VSAR_PROJECT_H_INCLUDED
<file_sep>/src/NXVsar/src/vsarint/VsarUI_main.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_NewProject.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <uf.h>
#include <NXOpen/Session.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/NXException.hxx>
#include <algorithm>
#include <boost/function.hpp>
#include <boost/bind.hpp>
//#include <boost/lambda/lambda.hpp>
#include <VsarUI_NewProject.hxx>
#include <VsarUI_TrainSettings.hxx>
#include <VsarUI_RailSettings.hxx>
#include <VsarUI_SlabSettings.hxx>
#include <VsarUI_BridgeSettings.hxx>
#include <VsarUI_SelmiInfiniteSettings.hxx>
#include <VsarUI_TunnelSettings.hxx>
#include <VsarUI_SolveResponse.hxx>
#include <VsarUI_SolveNoise.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Init_Names.hxx>
#include <Vsar_Result.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
using namespace VsarUI;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
//namespace VsarUI
//{
// NewProject *theVsar_NewProject;
//}
//------------------------------- DIALOG LAUNCHING ---------------------------------
//
// Before invoking this application one needs to open any part/empty part in NX
// because of the behavior of the blocks.
//
// Make sure the dlx file is in one of the following locations:
// 1.) From where NX session is launched
// 2.) $UGII_USER_DIR/application
// 3.) For released applications, using UGII_CUSTOM_DIRECTORY_FILE is highly
// recommended. This variable is set to a full directory path to a file
// containing a list of root directories for all custom applications.
// e.g., UGII_CUSTOM_DIRECTORY_FILE=$UGII_ROOT_DIR\menus\custom_dirs.dat
//
// You can create the dialog using one of the following way:
//
// 1. USER EXIT
//
// 1) Remove the following conditional definitions:
// a) #if USER_EXIT_OR_MENU
// #endif//USER_EXIT_OR_MENU
//
// b) #if USER_EXIT
// #endif//USER_EXIT
// 2) Create the Shared Library -- Refer "Block UI Styler programmer's guide"
// 3) Invoke the Shared Library through File->Execute->NX Open menu.
//
// 2. THROUGH CALLBACK OF ANOTHER DIALOG
//
// 1) Remove the following conditional definition:
// #if CALLBACK
// #endif//CALLBACK
// 2) Call the following line of code from wherever you want to launch this dialog.
// Vsar_NewProject->Show_Vsar_NewProject();
// 3) Integrate this file with your main application file.
//
// 3. MENU BAR
//
// 1) Remove the following conditional definition:
// a) #if USER_EXIT_OR_MENU
// #endif//USER_EXIT_OR_MENU
// 2) Add the following lines to your MenuScript file in order to
// associate a menu bar button with your dialog. In this
// example, a cascade menu will be created and will be
// located just before the Help button on the main menubar.
// The button, SAMPLEVB_BTN is set up to launch your dialog and
// will be positioned as the first button on your pulldown menu.
// If you wish to add the button to an existing cascade, simply
// add the 3 lines between MENU LAUNCH_CASCADE and END_OF_MENU
// to your menuscript file.
// The MenuScript file requires an extension of ".men".
// Move the contents between the dashed lines to your Menuscript file.
//
// !-----------------------------------------------------------------------------
// VERSION 120
//
// EDIT UG_GATEWAY_MAIN_MENUBAR
//
// BEFORE UG_HELP
// CASCADE_BUTTON BLOCKSTYLER_DLX_CASCADE_BTN
// LABEL Dialog Launcher
// END_OF_BEFORE
//
// MENU BLOCKSTYLER_DLX_CASCADE_BTN
// BUTTON SAMPLEVB_BTN
// LABEL Display SampleVB dialog
// ACTIONS <path of Shared library> !For example: D:\temp\SampleVB.dll
// END_OF_MENU
// !-----------------------------------------------------------------------------
//
// 3) Make sure the .men file is in one of the following locations:
//
// - $UGII_USER_DIR/startup
// - For released applications, using UGII_CUSTOM_DIRECTORY_FILE is highly
// recommended. This variable is set to a full directory path to a file
// containing a list of root directories for all custom applications.
// e.g., UGII_CUSTOM_DIRECTORY_FILE=$UGII_ROOT_DIR\menus\custom_dirs.dat
//
//------------------------------------------------------------------------------
//#if USER_EXIT_OR_MENU
// Associates one MenuScript menu-item name with the handler invoked for it
// by ufusr() below.
struct MenuItemCallbackInfo
{
    std::string m_menuItemName;           // name NX passes in ufusr's "param"
    boost::function<void ()> m_callback;  // dialog/operation launcher to run
};
//------------------------------------------------------------------------------
// NX user-exit entry point: dispatches the menu-item name passed by NX to
// the matching dialog/operation callback.
//
// param     - menu item name from the MenuScript ACTIONS entry (may be NULL)
// retcod    - out: 0 on success, non-zero if a callback threw
// param_len - length of param; unused here (param is NUL-terminated)
//------------------------------------------------------------------------------
extern "C" DllExport void ufusr(char *param, int *retcod, int param_len)
{
    (void)param_len;
    *retcod = 0;    // FIX: the original never wrote the output code at all
    if (!UF_is_initialized())
        UF_initialize();
    try
    {
        MenuItemCallbackInfo menuItemCbs[] = {
            {MENU_ITEM_NAME_NEW_PROJECT, NewProject::ShowDialog},
            {MENU_ITEM_NAME_SET_TRAIN, TrainSettings::ShowDialog},
            {MENU_ITEM_NAME_SET_RAIL, RailSettings::ShowDialog},
            {MENU_ITEM_NAME_SET_SLAB, SlabSettings::ShowDialog},
            {MENU_ITEM_NAME_SET_BRIDGE, BridgeSettings::ShowDialog},
            {MENU_ITEM_NAME_SET_BASE, SelmiInfiniteSettings::ShowDialog},
            {MENU_ITEM_NAME_SET_TUNNEL, TunnelSettings::ShowDialog},
            {MENU_ITEM_NAME_SOLVE_RESPONSE, SolveResponse::ShowDialog},
            {MENU_ITEM_NAME_SOLVE_NOISE, SolveNoise::ShowDialog},
            {MENU_ITEM_NAME_LOAD_RESULT, ResultsLoader()}
        };
        std::string menuItemName(param ? param : "");
        MenuItemCallbackInfo *pEnd = menuItemCbs + N_ELEMENTS(menuItemCbs);
        MenuItemCallbackInfo *pMenuItem = std::find_if(menuItemCbs, pEnd,
            boost::bind(&MenuItemCallbackInfo::m_menuItemName, _1) == menuItemName);
        if (pMenuItem != pEnd)
            pMenuItem->m_callback();
    }
    catch (...)
    {
        // FIX: never let an exception escape into NX, report failure via
        // retcod instead, and still balance UF_initialize() with the
        // UF_terminate() below (the original skipped it on throw).
        *retcod = 1;
    }
    UF_terminate();
}
//#endif//USER_EXIT_OR_MENU
//#if USER_EXIT
//------------------------------------------------------------------------------
// This method specifies how a shared image is unloaded from memory
// within NX. This method gives you the capability to unload an
// internal NX Open application or user exit from NX. Specify any
// one of the three constants as a return value to determine the type
// of unload to perform:
//
//
// Immediately : unload the library as soon as the automation program has completed
// Explicitly : unload the library from the "Unload Shared Image" dialog
// AtTermination : unload the library when the NX session terminates
//
//
// NOTE: A program which associates NX Open applications with the menubar
// MUST NOT use this option since it will UNLOAD your NX Open application image
// from the menubar.
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Tell NX when to unload this shared image: immediately in debug builds
// (so the DLL can be rebuilt without restarting NX), explicitly otherwise.
//------------------------------------------------------------------------------
extern "C" DllExport int ufusr_ask_unload()
{
// FIX: the original condition was "#if defined (DEBUG) || (_DEBUG)", which
// tests the *value* of _DEBUG instead of whether it is defined; it is a
// preprocessor error when _DEBUG is defined with an empty value.
#if defined(DEBUG) || defined(_DEBUG)
    return (int)Session::LibraryUnloadOptionImmediately;
#else
    return (int)Session::LibraryUnloadOptionExplicitly;
#endif
    //return (int)Session::LibraryUnloadOptionAtTermination;
}
//------------------------------------------------------------------------------
// Following method cleanup any housekeeping chores that may be needed.
// This method is automatically called by NX.
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Housekeeping hook automatically invoked by NX when the image unloads.
// Currently a no-op; any failure would be surfaced in an NX message box.
//------------------------------------------------------------------------------
extern "C" DllExport void ufusr_cleanup(void)
{
    try
    {
        //---- Enter your callback code here -----
    }
    catch (const std::exception &e)
    {
        //---- Enter your exception handling code here -----
        NewProject::s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, e.what());
    }
}
//#endif//USER_EXIT
<file_sep>/src/NXVsar/include/VsarUI_BaseDialog.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_BaseDialog.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32
//
//==============================================================================
#ifndef VSARUI_BASEDIALOG_H_INCLUDED
#define VSARUI_BASEDIALOG_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <string>
#include <boost/scoped_ptr.hpp>
#include <NXOpen/BlockStyler_BlockDialog.hxx>
//------------------------------------------------------------------------------
// Namespaces needed for following template
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace NXOpen
{
class Session;
class UI;
class TaggedObject;
namespace BlockStyler
{
class UIBlock;
class BlockDialog;
}
}
namespace VsarUI
{
// Common base for the application's Block Styler dialogs: owns the
// BlockDialog instance and declares the standard callback set that
// concrete dialogs override.
class BaseDialog
{
    // class members
public:
    // NX session/UI pointers shared by all dialogs (set up elsewhere).
    static NXOpen::Session *s_theSession;
    static NXOpen::UI *s_theUI;
    // Display the dialog; dialogMode selects create vs. edit behaviour.
    int Show(NXOpen::BlockStyler::BlockDialog::DialogMode dialogMode = NXOpen::BlockStyler::BlockDialog::DialogModeCreate);
    //----------------------- BlockStyler Callback Prototypes ---------------------
    // The following member function prototypes define the callbacks
    // specified in your BlockStyler dialog. The empty implementation
    // of these prototypes is provided in the Vsar_BaseDialog.cpp file.
    // You are REQUIRED to write the implementation for these functions.
    //------------------------------------------------------------------------------
    virtual void InitializeCb() = 0;    // cache UI block pointers (mandatory)
    virtual void DialogShownCb();       // called just before the dialog appears
    virtual int ApplyCb();
    virtual int OkCb();
    virtual bool Okay();                // validation hook (see SolveResponse::Okay)
    virtual int CancelCb();
    virtual int FilterCb(NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::TaggedObject *pSel);
    virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* pBlock);
    virtual void FocusNotifyCb(NXOpen::BlockStyler::UIBlock* pBlock, bool focus);
    virtual void KeyboardFocusNotifyCb(NXOpen::BlockStyler::UIBlock* pBlock, bool focus);
protected:
    // dialogName: the .dlx layout file to load.
    BaseDialog(const std::string &dialogName);
    virtual ~BaseDialog();
protected:
    // Owned Block Styler dialog instance.
    boost::scoped_ptr<NXOpen::BlockStyler::BlockDialog> m_theDialog;
};
}
//#if defined(__MSVC_RUNTIME_CHECKS)
//#undef __MSVC_RUNTIME_CHECKS
//#endif
//#if !defined(_DEBUG)
//#define _DEBUG
//#endif
#endif //VSARUI_BASEDIALOG_H_INCLUDED
<file_sep>/src/NXVsar/include/VsarUI_SolveResponse.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Solve.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 05-23-2011 (Format: mm-dd-yyyy)
// Time: 20:48
//
//==============================================================================
#ifndef VSARUI_SOLVERESPONSE_H_INCLUDED
#define VSARUI_SOLVERESPONSE_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
namespace NXOpen
{
class TaggedObject;
}
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace VsarUI
{
// Dialog driving the structural-response solve: exposes train speed and
// time step plus the element/node output selections, then runs the solver
// (see VsarUI_SolveResponse.cxx).
class SolveResponse : public BaseCompDialog
{
public:
    // Which kind of entity the output-selection block currently picks.
    enum SelectionOutputType
    {
        Selection_Output_Elements = 0,
        Selection_Output_Nodes
    };
    // class members
public:
    SolveResponse();
    ~SolveResponse();
    // Create the dialog and show it in edit mode.
    static void ShowDialog();
    //----------------------- BlockStyler Callback Prototypes ---------------------
    // The following member function prototypes define the callbacks
    // specified in your BlockStyler dialog. The empty implementation
    // of these prototypes is provided in the Solve.cpp file.
    // You are REQUIRED to write the implementation for these functions.
    //------------------------------------------------------------------------------
    virtual void InitializeCb();
    virtual void DialogShownCb();
    virtual int ApplyCb();
    virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
    virtual bool Okay();
protected:
    // Toggle states and current selections of the output blocks.
    bool CanOutputElements() const;
    std::vector<NXOpen::TaggedObject*> GetOutputElements() const;
    void SetOutputElements(const std::vector<NXOpen::TaggedObject*> &);
    bool CanOutputNodes() const;
    std::vector<NXOpen::TaggedObject*> GetOutputNodes() const;
    void SetOutputNodes(const std::vector<NXOpen::TaggedObject*> &);
    bool CanOutputNodesForNoise() const;
private:
    // UI blocks cached by InitializeCb(); names mirror the .dlx block ids.
    //NXOpen::BlockStyler::UIBlock* trainSettingsGrp;// Block type: Group
    NXOpen::BlockStyler::UIBlock* m_trainSpeed;// Block type: Expression
    NXOpen::BlockStyler::UIBlock* m_timeStep;// Block type: Expression
    //NXOpen::BlockStyler::UIBlock* outputGrp;// Block type: Group
    //NXOpen::BlockStyler::UIBlock* m_hasTimeOutput;// Block type: Toggle
    //NXOpen::BlockStyler::UIBlock* m_outputTime;// Block type: Expression
    //NXOpen::BlockStyler::UIBlock* m_hasNodesOutput;// Block type: Toggle
    NXOpen::BlockStyler::UIBlock* m_selOutputType;// Block type: Enumeration
    NXOpen::BlockStyler::UIBlock* m_hasElemsOutput;// Block type: Toggle
    NXOpen::BlockStyler::UIBlock* m_outputElements;// Block type: Select Elements
    NXOpen::BlockStyler::UIBlock* m_hasNodesOutput;// Block type: Toggle
    NXOpen::BlockStyler::UIBlock* m_outputNodes;// Block type: Select Nodes
    NXOpen::BlockStyler::UIBlock* m_hasNoiseNodeOutput;// Block type: Toggle
    NXOpen::BlockStyler::UIBlock* m_loadBtn; // Block type: Button
};
}
#endif //VSARUI_SOLVERESPONSE_H_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Result.hxx
#ifndef VSAR_RESULT_H_INCLUDED
#define VSAR_RESULT_H_INCLUDED
#include <string>
#include <vector>
#include <boost/shared_ptr.hpp>
#include <boost/filesystem.hpp>
#include <NXOpen\CAE_XYFunctionDataTypes.hxx>
namespace NXOpen
{
class Point;
//namespace CAE
//{
// class AfuData;
//}
}
namespace Vsar
{
class ResultBlock;
// Abstract base for a solver result file: existence test, loading into the
// NX session, and the path where the file lives.
class BaseResult
{
public:
    BaseResult()
    {
    }
    // Pure virtual destructor keeps the class abstract; it still needs an
    // out-of-line definition (expected in the .cxx).
    virtual ~BaseResult() = 0;
    // Whether the file named by GetResultPathName() is present on disk.
    bool IsResultExist() const;
    // Load the result file into NX.
    // NOTE(review): exact return semantics not visible here - confirm in .cxx.
    bool Load() const;
    // Full path name of the result file represented by this object.
    virtual std::string GetResultPathName() const = 0;
};
// Result produced by the response solve as an .op2 file.
// NOTE(review): inferred from the class name - confirm against the path
// returned by GetResultPathName() in the .cxx.
class ResponseOp2Result : public BaseResult
{
public:
    ResponseOp2Result() : BaseResult()
    {
    }
    virtual ~ResponseOp2Result()
    {
    }
    virtual std::string GetResultPathName() const;
};
// Base for results written as NX AFU (function) files: creates the file,
// then lets subclasses append their records.
class BaseAfuResult : public BaseResult
{
public:
    BaseAfuResult()
    {
    }
    virtual ~BaseAfuResult()
    {
    }
    // Create the AFU result file and fill it via CreateRecords().
    void Create();
protected:
    void CreateResultFile();
    // Subclass hook: append all records to the freshly created file.
    virtual void CreateRecords() = 0;
};
// Describes one record to extract (declared elsewhere).
struct ResponseRecordItem;
// Response result assembled into an AFU file from data files found under
// m_inputPath.
class ResponseAfuResult : public BaseAfuResult
{
public:
    ResponseAfuResult(const std::string &inputPath) : BaseAfuResult(), m_inputPath(inputPath)
    {
    }
    virtual ~ResponseAfuResult()
    {
    }
    virtual std::string GetResultPathName() const;
protected:
    virtual void CreateRecords();
    // Append a single AFU record described by recordItem.
    void CreateRecord(const ResponseRecordItem &recordItem);
    // Read the record's X/Y value series from the solver's .dat output.
    void ReadDataFromDat(const ResponseRecordItem &recordItem,
        std::vector<double> &xValues, std::vector<double> &yValues) const;
    //virtual StlResultBlockVector ExtractContent(std::ifstream &iResult);
private:
    std::string m_inputPath;    // location of the solver's input/output data
};
typedef std::vector<boost::shared_ptr<ResultBlock>> StlResultBlockVector;
// AFU result parsed out of a Nastran text result file.
class NastranResult : public BaseAfuResult
{
public:
    NastranResult() : BaseAfuResult()
    {
    }
    virtual ~NastranResult()
    {
    }
protected:
    std::string GetNastranResultPathName() const;
    virtual void CreateRecords();
    // Parse the open result stream into result blocks.
    // NOTE(review): std::ifstream is used but <fstream> is not included in
    // this header - it relies on a transitive include.
    virtual StlResultBlockVector ExtractContent(std::ifstream &iResult) = 0;
    template<typename BlockType>
    StlResultBlockVector ReadDataBlock(std::ifstream &ifStream);
};
// Disabled legacy implementation, kept for reference only.
#if 0
class ResponseResult : public NastranResult
{
public:
    ResponseResult() : NastranResult()
    {
    }
    virtual ~ResponseResult()
    {
    }
    virtual std::string GetResultPathName() const;
protected:
    virtual StlResultBlockVector ExtractContent(std::ifstream &iResult);
};
#endif
// Intermediate noise result parsed from a Nastran output file.
class NoiseIntermResult : public NastranResult
{
public:
    NoiseIntermResult() : NastranResult()
    {
    }
    virtual ~NoiseIntermResult()
    {
    }
    virtual std::string GetResultPathName() const;
protected:
    virtual StlResultBlockVector ExtractContent(std::ifstream &iResult);
};
// Final noise result: builds time- and frequency-domain AFU records for
// each requested output point from .dat files found under m_srcDir.
class NoiseResult : public BaseAfuResult
{
public:
    NoiseResult(const boost::filesystem::path &srcDir,
        const std::vector<NXOpen::Point*> &pts) : BaseAfuResult(),
        m_srcDir(srcDir), m_outputPoints(pts)
    {
    }
    virtual ~NoiseResult()
    {
    }
    virtual std::string GetResultPathName() const;
protected:
    virtual void CreateRecords();
    void WriteRecord(const std::string &recordName, NXOpen::CAE::XyFunctionDataType funcType,
        NXOpen::CAE::XyFunctionUnit xUnit, NXOpen::CAE::XyFunctionUnit yUnit,
        const std::vector<double> &xValues, const std::vector<double> &yValues);
#if 0
    void WriteRecord(const std::string &recordName, NXOpen::CAE::XyFunctionDataType funcType,
        NXOpen::CAE::XyFunctionUnit xUnit, NXOpen::CAE::XyFunctionUnit yUnit,
        const std::vector<double> &xValues,
        const std::vector<double> &yRealValues, const std::vector<double> &yImagValues);
#endif
    // Read one output series from the named noise .dat file.
    void ReadDataFromDat(const std::string &noiseOutputName,
        std::vector<double> &xValues, std::vector<double> &yValues) const;
    void OutputTimeRecord(int idxRecord);
    void OutputFreqRecord(int idxRecord);
private:
    boost::filesystem::path m_srcDir;   // directory containing the noise .dat files
    // NOTE(review): reference member - the caller's vector must outlive this
    // object (SolveNoiseOperation, by contrast, stores a copy).
    const std::vector<NXOpen::Point*> &m_outputPoints;
};
// Function object that loads existing results; used as the callback for
// MENU_ITEM_NAME_LOAD_RESULT in ufusr().
class ResultsLoader
{
public:
    ResultsLoader()
    {
    }
    ~ResultsLoader()
    {
    }
    // Perform the load.
    void operator () ();
};
}
#endif //VSAR_RESULT_H_INCLUDED
<file_sep>/src/NXVsar/src/vsarint/VsarUI_SolveResponse.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Solve.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 05-23-2011 (Format: mm-dd-yyyy)
// Time: 20:48 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_SolveResponse.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <NXOpen/BlockStyler_UIBlock.hxx>
#include <NXOpen/BlockStyler_PropertyList.hxx>
#include <Vsar_SolveOperation.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Solution.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor: bind this dialog to its layout file "SolveResponse.dlx" and
// to a freshly created response solution model.
// NOTE(review): BaseCompDialog is assumed to take ownership of the new
// ResponseSolution - confirm in VsarUI_BaseCompDialog.
//------------------------------------------------------------------------------
SolveResponse::SolveResponse() : BaseCompDialog("SolveResponse.dlx", new ResponseSolution())
{
}
//------------------------------------------------------------------------------
// Destructor - nothing to release beyond what the base class cleans up.
//------------------------------------------------------------------------------
SolveResponse::~SolveResponse()
{
}
//------------------------------------------------------------------------------
//Method name: Show_Solve
//------------------------------------------------------------------------------
void SolveResponse::ShowDialog()
{
boost::scoped_ptr<SolveResponse> pSolveDlg(new SolveResponse());
try
{
// The following method shows the dialog immediately
pSolveDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
// Caches pointers to the dialog's UI blocks by their .dlx identifiers.
// Each FindBlock() string must match a block id in SolveResponse.dlx.
//------------------------------------------------------------------------------
void SolveResponse::InitializeCb()
{
    BaseCompDialog::InitializeCb();
    try
    {
        CompositeBlock *pTopBlock = m_theDialog->TopBlock();
        //trainSettingsGrp = pTopBlock->FindBlock("trainSettingsGrp");
        m_trainSpeed = pTopBlock->FindBlock(TRAIN_SPEED_ID_NAME);
        m_timeStep = pTopBlock->FindBlock(COMPUTE_TIME_STEP_ID_NAME);
        //outputGrp = pTopBlock->FindBlock("outputGrp");
        //m_hasTimeOutput = pTopBlock->FindBlock("hasTimeOutput");
        //m_outputTime = pTopBlock->FindBlock("outputTime");
        m_selOutputType = pTopBlock->FindBlock("selOutputType");
        m_hasElemsOutput = pTopBlock->FindBlock("hasElemsOutput");
        m_outputElements = pTopBlock->FindBlock("outputElements");
        m_hasNodesOutput = pTopBlock->FindBlock("hasNodesOutput");
        // NOTE(review): id is singular "outputNode" while the member is
        // m_outputNodes - verify against the block id in the .dlx file.
        m_outputNodes = pTopBlock->FindBlock("outputNode");
        m_hasNoiseNodeOutput = pTopBlock->FindBlock("hasNoiseNodeOutput");
        m_loadBtn = pTopBlock->FindBlock("loadBtn");
    }
    catch(std::exception& ex)
    {
        //---- Enter your exception handling code here -----
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
void SolveResponse::DialogShownCb()
{
    try
    {
        //boost::scoped_ptr<PropertyList> pHasTimeOutputPropList(m_selOutputType->GetProperties());
        //boost::scoped_ptr<PropertyList> pHasNodesPropList(m_outputElements->GetProperties());
        //boost::scoped_ptr<PropertyList> pHasNodesPropList(m_outputNode->GetProperties());
        //pHasTimeOutputPropList->SetLogical("Value", true);
        //pHasNodesPropList->SetLogical("Value", true);
        // Sync both selection blocks' enabled state with their toggles
        // before the dialog becomes visible.
        UpdateCb(m_hasElemsOutput);
        UpdateCb(m_hasNodesOutput);
    }
    catch(std::exception& ex)
    {
        //---- Enter your exception handling code here -----
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
int SolveResponse::ApplyCb()
{
int errorCode = 0;
try
{
bool canOutputElems(CanOutputElements());
std::vector<TaggedObject*> outputElems(GetOutputElements());
bool canOutputNodes(CanOutputNodes());
std::vector<TaggedObject*> outputNodes(GetOutputNodes());
SolveSettings solveSettings(canOutputElems, outputElems,
canOutputNodes, outputNodes, CanOutputNodesForNoise());
solveSettings.Apply();
SolveResponseOperation solveOper;
solveOper.Execute();
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
errorCode = 1;
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
return errorCode;
}
//------------------------------------------------------------------------------
//Callback Name: update_cb
// Reacts to UI changes:
//  - output-type enumeration: show only the matching selection block
//    (elements vs. nodes) and move focus into it
//  - element/node toggles: enable/disable their selection blocks
//  - load button: load previously computed results
// Always returns 0; errors are reported via a message box.
//------------------------------------------------------------------------------
int SolveResponse::UpdateCb(UIBlock* block)
{
    try
    {
        if(block == m_selOutputType)
        {
            boost::scoped_ptr<PropertyList> pselOutputTypePropList(m_selOutputType->GetProperties());
            boost::scoped_ptr<PropertyList> pOutputElemsPropList(m_outputElements->GetProperties());
            boost::scoped_ptr<PropertyList> pOutputNodesPropList(m_outputNodes->GetProperties());
            SelectionOutputType selOutputType = static_cast<SelectionOutputType>(pselOutputTypePropList->GetEnum("Value"));
            // Show exactly one of the two selection blocks and focus it.
            pOutputElemsPropList->SetLogical("Show", selOutputType == Selection_Output_Elements);
            if (selOutputType == Selection_Output_Elements)
                m_outputElements->Focus();
            pOutputNodesPropList->SetLogical("Show", selOutputType == Selection_Output_Nodes);
            if (selOutputType == Selection_Output_Nodes)
                m_outputNodes->Focus();
        }
        else if(block == m_hasElemsOutput)
        {
            // Element selection is usable only while its toggle is on.
            boost::scoped_ptr<PropertyList> pOutputElemsPropList(m_outputElements->GetProperties());
            pOutputElemsPropList->SetLogical("Enable", CanOutputElements());
        }
        else if(block == m_hasNodesOutput)
        {
            // Node selection is usable only while its toggle is on.
            boost::scoped_ptr<PropertyList> pOutputNodesPropList(m_outputNodes->GetProperties());
            pOutputNodesPropList->SetLogical("Enable", CanOutputNodes());
        }
        else if(block == m_loadBtn)
        {
            // Load button: bring previously computed results into NX.
            SolveResponseOperation solveOper;
            solveOper.LoadResult();
        }
    }
    catch(std::exception& ex)
    {
        //---- Enter your exception handling code here -----
        s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
    }
    return 0;
}
// Validation hook: only succeeds when noise-node output is enabled.
// NOTE(review): this makes noise-node output mandatory for OK - confirm
// that is the intended rule.
bool SolveResponse::Okay()
{
    return CanOutputNodesForNoise();
}
bool SolveResponse::CanOutputElements() const
{
boost::scoped_ptr<PropertyList> pHasElemsPropList(m_hasElemsOutput->GetProperties());
return pHasElemsPropList->GetLogical("Value");
}
std::vector<TaggedObject*> SolveResponse::GetOutputElements() const
{
boost::scoped_ptr<PropertyList> pOutputElemsPropList(m_outputElements->GetProperties());
return pOutputElemsPropList->GetTaggedObjectVector("SelectedObjects");
}
// Pre-select the given elements in the element-selection block.
void SolveResponse::SetOutputElements(const std::vector<TaggedObject*> &outputElems)
{
    boost::scoped_ptr<PropertyList> pProps(m_outputElements->GetProperties());
    pProps->SetTaggedObjectVector("SelectedObjects", outputElems);
}
bool SolveResponse::CanOutputNodes() const
{
boost::scoped_ptr<PropertyList> pHasNodesPropList(m_hasNodesOutput->GetProperties());
return pHasNodesPropList->GetLogical("Value");
}
std::vector<TaggedObject*> SolveResponse::GetOutputNodes() const
{
boost::scoped_ptr<PropertyList> pOutputNodesPropList(m_outputNodes->GetProperties());
return pOutputNodesPropList->GetTaggedObjectVector("SelectedObjects");
}
// Pre-select the given nodes in the node-selection block.
void SolveResponse::SetOutputNodes(const std::vector<TaggedObject*> &outputNodes)
{
    boost::scoped_ptr<PropertyList> pProps(m_outputNodes->GetProperties());
    pProps->SetTaggedObjectVector("SelectedObjects", outputNodes);
}
bool SolveResponse::CanOutputNodesForNoise() const
{
boost::scoped_ptr<PropertyList> pHasNoiseNodesPropList(m_hasNoiseNodeOutput->GetProperties());
return pHasNoiseNodesPropList->GetLogical("Value");
}
}
<file_sep>/src/VsarInit/include/Vsar_Init_Utils.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_NewProject.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32
//
//==============================================================================
#ifndef VSAR_INIT_UTILS_H_INCLUDED
#define VSAR_INIT_UTILS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <string>
#include <boost/scope_exit.hpp>
#include <libvsarinit_exports.h>
namespace Vsar
{
// Native path separator for the current platform.
// NOTE(review): the _WIN64 branch is unreachable because _WIN64 builds also
// define _WIN32; harmless, since both yield '\\'.
#if defined _WIN32
#define PATH_DELIM '\\'
#elif defined _WIN64
#define PATH_DELIM '\\'
#else
#define PATH_DELIM '/'
#endif
// Delete "ptr" and reset it to NULL when the enclosing scope exits
// (exception-safe, via Boost.ScopeExit).
#define DELETE_CLASS_POINTER(ptr) \
    BOOST_SCOPE_EXIT((&ptr)) \
    { \
        delete ptr; \
        ptr = NULL; \
    } \
    BOOST_SCOPE_EXIT_END
#ifndef MSGTXT
/*
This macro is wrapped around UG strings that need to be converted to
users' native languages (NLM). Since some of these strings are present
in the OM error reporting code, the definition needs to be in a file
available to all UG code and to Infomanager code too.
*/
#define MSGTXT(s) s
#endif
#ifndef N_ELEMENTS
/*
* Macro N_ELEMENTS which determines the number of elements in an
* array of any type. Example usage:
* qsort(list, N_ELEMENTS(list), sizeof(list[0]), compare);
* for (indx=0; indx<N_ELEMENTS(list); indx++) {
*/
#define N_ELEMENTS(array) ((unsigned int)(sizeof(array)/sizeof(array[0])))
#endif
// Install root of the application (see Vsar_Init_Utils.cxx).
VSARINITEXPORT std::string GetInstallPath();
// Copy one file, replacing any existing destination first.
// NOTE(review): the name collides with the Windows CopyFile macro when
// <Windows.h> is included before this header - see the .cxx for details.
VSARINITEXPORT void CopyFile(const std::string &fromPath, const std::string &fromFileName,
    const std::string &toPath, const std::string &toFileName);
// Copy the entries directly inside fromPath into toPath (not recursive).
VSARINITEXPORT void CopyDirectory(const std::string &fromPath, const std::string &toPath);
// Load a result file into NX; returns 0 on success, else an NX error code.
VSARINITEXPORT int LoadResult(const std::string &resultPathName);
}
#endif //VSAR_INIT_UTILS_H_INCLUDED
<file_sep>/src/VsarInit/src/vsar/Vsar_Init_Utils.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Vsar_NewProject.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: wujif
// Version: NX 7.5
// Date: 02-19-2011 (Format: mm-dd-yyyy)
// Time: 17:32 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <Vsar_Init_Utils.hxx>
#if defined(_WIN32) || defined(_WIN64)
#include <Windows.h>
#endif
#include <boost/filesystem.hpp>
#include <NXOpen/NXException.hxx>
#include <NXOpen/CAE_FTK_DataManager.hxx>
namespace Vsar
{
// Return the application's install root.
//
// Windows: find the module (DLL) containing this very function via
// VirtualQuery on its own address, then take the grandparent directory of
// the DLL path (<install>/<subdir>/lib.dll -> <install>).
// Other platforms: fall back to $UGII_USER_DIR.
//
// NOTE(review): the Windows result carries no trailing path delimiter while
// the non-Windows branch guarantees one - callers must cope with both.
std::string GetInstallPath()
{
    std::string installPath;
#if defined(_WIN32) || defined(_WIN64)
    MEMORY_BASIC_INFORMATION mbi;
    // AllocationBase of the region containing this function is the HMODULE
    // of the DLL this code lives in.
    VirtualQuery(GetInstallPath, &mbi, sizeof(mbi));
    HMODULE hModule = static_cast<HMODULE>(mbi.AllocationBase);
    //hModule = GetModuleHandle("libvsarinit.dll");
    char fullPathName[_MAX_PATH];
    GetModuleFileName(hModule, fullPathName, _MAX_PATH);
    boost::filesystem::path dllPath(fullPathName);
    installPath = dllPath.parent_path().parent_path().string();
#else
    // TODO: add more check
    // NOTE(review): getenv may return NULL when UGII_USER_DIR is unset,
    // which is undefined behaviour when assigned to std::string.
    installPath = getenv("UGII_USER_DIR");
    if (*installPath.rbegin() != PATH_DELIM)
        installPath.push_back(PATH_DELIM);
#endif
    return installPath;
}
// Copy fromPath/fromFileName to toPath/toFileName, removing any existing
// destination first (boost copy_file does not overwrite).
//
// NOTE(review): <Windows.h> is included above and defines CopyFile as a
// macro (CopyFileA/W), so on Windows this definition is compiled under the
// expanded name while the header declares plain Vsar::CopyFile.  Callers
// that do not also see the macro will fail to link - consider #undef
// CopyFile after the Windows.h include, or renaming this function.
void CopyFile(const std::string &fromPath, const std::string &fromFileName,
    const std::string &toPath, const std::string &toFileName)
{
    boost::filesystem::path fromFilePathName(fromPath);
    boost::filesystem::path toFilePathName(toPath);
    fromFilePathName /= fromFileName;
    toFilePathName /= toFileName;
    // Remove a stale destination (file or directory tree) so the copy below
    // cannot fail on an existing target.
    if (boost::filesystem::exists(toFilePathName))
        boost::filesystem::remove_all(toFilePathName);
    boost::filesystem::copy_file(fromFilePathName, toFilePathName);
}
// Copy every entry directly inside fromPathName into toPathName, replacing
// existing destinations.  Not recursive: copy_file is invoked on each
// entry, so a subdirectory entry would make boost throw.
// NOTE(review): the in-code comment says this copies a flat template
// folder ("common"), so the flat-directory assumption appears intended.
void CopyDirectory(const std::string &fromPathName, const std::string &toPathName)
{
    namespace bfs = boost::filesystem;
    // Copy part files to location
    const bfs::path fromPath(fromPathName);
    bfs::directory_iterator iter(fromPath);
    const bfs::path toPath(toPathName);
    bfs::path fromFilePathName;
    bfs::path toFilePathName;
    // copy all the files in template folder "common" to project path
    while (iter != bfs::directory_iterator())
    {
        fromFilePathName = iter->path();
        toFilePathName = toPath / fromFilePathName.filename();
        // Remove a stale destination so copy_file does not fail to overwrite.
        if (bfs::exists(toFilePathName))
            bfs::remove_all(toFilePathName);
        bfs::copy_file(fromFilePathName, toFilePathName);
        ++iter;
    }
}
// Load a result file into the NX FTK data manager.
//
// Returns 0 on success *or* when the file does not exist (the missing-file
// case is silently skipped); otherwise the NXException error code raised
// by LoadFile.
// NOTE(review): NXOpen::Session is used here but only NXException.hxx and
// CAE_FTK_DataManager.hxx are included - relies on a transitive include.
int LoadResult(const std::string &resultPathName)
{
    NXOpen::CAE::FTK::DataManager *pDataMgr = NXOpen::Session::GetSession()->DataManager();
    int rtc = 0;
    if (boost::filesystem::exists(resultPathName))
    {
        try
        {
            pDataMgr->LoadFile(resultPathName.c_str());
        }
        catch (NXOpen::NXException &ex)
        {
            rtc = ex.ErrorCode();
        }
    }
    return rtc;
}
}<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/include/NXVsdaneDotNet.hxx
// NXDotNetWrap.h
#ifndef VSDANE_NXVSDANEDOTNET_HXX_INCLUDED
#define VSDANE_NXVSDANEDOTNET_HXX_INCLUDED
#pragma once
#pragma managed
//#using <NXOpen.dll>
namespace Vsdane
{
// Managed (C++/CLI) wrapper exposing NX swept-mesh creation to .NET code.
public ref class NXVsdane
{
public:
    // Create a swept mesh named meshName in collector meshColName on the
    // given FE model, sweeping from pSrcFace to pTargetFace with element
    // size pEleSize.  (Behaviour inferred from the signature - the
    // implementation is not in this header.)
    void CreateSweptMeshDotNet(NXOpen::CAE::IFEModel ^pFeModel,
        System::String^ meshColName, System::String ^meshName,
        NXOpen::CAE::CAEFace ^pSrcFace, NXOpen::CAE::CAEFace ^pTargetFace, NXOpen::Expression ^pEleSize);
};
}
#endif //VSDANE_NXVSDANEDOTNET_HXX_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_SolveOperation.hxx
#ifndef VSAR_SOLVEOPERATION_H_INCLUDED
#define VSAR_SOLVEOPERATION_H_INCLUDED
#include <string>
#include <vector>
#include <boost/filesystem.hpp>
#include <boost/tuple/tuple.hpp>
namespace NXOpen
{
class TaggedObject;
class Point;
namespace CAE
{
class FEModelOccurrence;
class FENode;
}
}
namespace Vsar
{
//////////////////////////////////////////////////////////////////////////
// Solve Operations
// Template-method base for running a solve: creates the work directory,
// runs PreExecute/Solve, and can load/clean the produced results.
class BaseSolveOperation
{
public:
    BaseSolveOperation();
    // Pure virtual destructor keeps the class abstract; an out-of-line
    // definition is still required.
    virtual ~BaseSolveOperation() = 0;
    // Run the full solve sequence (uses the PreExecute/Solve hooks).
    void Execute();
    // Scratch directory used while solving.
    boost::filesystem::path GetWorkDir() const
    {
        return m_workDir;
    }
    // Directory holding the solution's files.
    boost::filesystem::path GetSolutionDir() const
    {
        return m_solDir;
    }
    virtual void LoadResult() = 0;
    virtual void CleanResult() = 0;
protected:
    void CreateWorkDir();
    void CleanResultFile(const std::string &resultName);
    virtual void PreExecute() = 0;
    virtual void Solve() = 0;
    // Whether results should be loaded automatically after the solve.
    virtual bool CanAutoLoadResult() const = 0;
    //virtual void PrepareInputFiles() const = 0;
protected:
    boost::filesystem::path m_workDir;  // scratch/work directory
    boost::filesystem::path m_solDir;   // solution directory
};
// Solve of the structural response.
class SolveResponseOperation : public BaseSolveOperation
{
public:
    SolveResponseOperation();
    virtual ~SolveResponseOperation();
    virtual void LoadResult();
    virtual void CleanResult();
protected:
    virtual void PreExecute();
    virtual void Solve();
    virtual bool CanAutoLoadResult() const;
};
// Solve of the noise computation for the given output points.
class SolveNoiseOperation : public BaseSolveOperation
{
public:
    SolveNoiseOperation(const std::vector<NXOpen::Point*> &pts);
    virtual ~SolveNoiseOperation();
    virtual void LoadResult();
    virtual void CleanResult();
protected:
    virtual void PreExecute();
    virtual void Solve();
    virtual bool CanAutoLoadResult() const;
private:
    // Copied (unlike NoiseResult, which holds a reference), so there is no
    // lifetime dependency on the caller's vector.
    std::vector<NXOpen::Point*> m_outputPoints;
};
//////////////////////////////////////////////////////////////////////////
// Compute Tasks
// Abstract compute task executed as one step of a solve operation.
// Run() presumably drives PrepareInput -> CallExecutable -> MoveOutputs
// (implementation not visible in this header -- confirm in the .cxx).
class BaseTask
{
public:
// solOper supplies the working/solution directories; stored as a raw pointer.
BaseTask(const BaseSolveOperation *solOper);
virtual ~BaseTask() = 0;
virtual void Run();
// Names of the result files this task is expected to produce.
virtual std::vector<std::string> GetOutputResults() const = 0;
protected:
// Remove stale outputs before the task runs.
virtual void CleanResults() const;
// Write the input files the external executable needs.
virtual void PrepareInput() = 0;
// Launch the external solver executable.
virtual void CallExecutable() const;
// Move produced files to their final location.
virtual void MoveOutputs() const;
virtual std::string GetExecutableName() const = 0;
// Log file names used to detect success/failure of the executable.
virtual std::string GetSuccessLog() const = 0;
virtual std::string GetFailLog() const = 0;
protected:
// Owning solve operation (raw pointer; lifetime managed by the caller).
const BaseSolveOperation *m_solOper;
};
// Task that runs the excitation computation executable.
// Overrides the BaseTask hooks; see the base class for contracts.
class ComputeExcitationTask : public BaseTask
{
public:
ComputeExcitationTask(const BaseSolveOperation *solOper);
~ComputeExcitationTask();
virtual std::vector<std::string> GetOutputResults() const;
protected:
virtual void PrepareInput();
virtual std::string GetExecutableName() const;
virtual std::string GetSuccessLog() const;
virtual std::string GetFailLog() const;
};
// Task that converts the computed excitation into solver input, mapping it
// onto the rail FE model nodes.
class ConvertExcitationTask : public BaseTask
{
public:
ConvertExcitationTask(const BaseSolveOperation *solOper);
~ConvertExcitationTask();
virtual std::vector<std::string> GetOutputResults() const;
protected:
virtual void PrepareInput();
virtual std::string GetExecutableName() const;
virtual std::string GetSuccessLog() const;
virtual std::string GetFailLog() const;
// FE model occurrence of the rail in the assembly FEM.
NXOpen::CAE::FEModelOccurrence* GetRailFEModelOcc() const;
// Tags of the rail nodes the excitation is applied to.
std::vector<tag_t> GetRailNodes();
// Write the conversion input file for the given rail nodes.
void WriteInputData(std::vector<tag_t> &railNodes) const;
private:
// Node numbering offset used during conversion (semantics defined in .cxx).
int m_nodeOffset;
};
//////////////////////////////////////////////////////////////////////////
// excitation input
// Abstract generator of input files for an external solver executable.
// Concrete subclasses write their files into m_targetDir via Generate().
class BaseExeInput
{
public:
BaseExeInput(const boost::filesystem::path &targetDir) : m_targetDir(targetDir)
{
}
virtual ~BaseExeInput()
{
}
// Produce all input files in the target directory.
virtual void Generate() const = 0;
protected:
// Directory the input files are written to.
boost::filesystem::path m_targetDir;
};
// Generates the input files for the excitation computation executable:
// irregularity data plus vehicle/rail/slab/beam/calculation data files.
class ExcitationInput : public BaseExeInput
{
public:
// One value to export: an expression taken from a part, converted to SI.
struct InputItem
{
std::string m_partName;       // part the expression lives in
std::string m_expName;        // expression name within that part
// conversion
std::string m_targetUnitName; // to SI Unit
};
typedef std::vector<InputItem> StlInputItemVector;
public:
ExcitationInput(const boost::filesystem::path &targetDir) : BaseExeInput(targetDir)
{
}
virtual ~ExcitationInput()
{
}
virtual void Generate() const;
protected:
// Copy the track irregularity data files into the target directory.
void CopyIrrData() const;
void WriteVehicleData() const;
void WriteRailData() const;
void WriteSlabData() const;
void WriteBeamData() const;
void WriteCalculationData() const;
// Common writer: dump the given items into fileName in the target directory.
void WriteInputData(const StlInputItemVector &vInputItems, const std::string &fileName) const;
};
//////////////////////////////////////////////////////////////////////////
// Noise input
// Generates the input files for the noise computation executable from the
// intermediate response results and the user-selected output points.
class NoiseInput : public BaseExeInput
{
public:
NoiseInput(const boost::filesystem::path &targetDir,
const std::vector<NXOpen::Point*> &pts) : BaseExeInput(targetDir), m_outputPoints(pts), m_refNodeSeq()
{
ConstructRefNodeSequence();
}
virtual ~NoiseInput()
{
}
virtual void Generate() const;
protected:
// Build m_refNodeSeq (reference nodes used when writing the input records).
void ConstructRefNodeSequence();
// Name of the intermediate result file produced by the response solve.
std::string GetIntermediateResult() const;
// Map a result record name to the input file name expected by the solver.
std::string GetTargetInputName(const std::string &recordName) const;
void WriteFrequenceData() const;
// Write one frequency-domain record (real/imaginary parts per frequency).
void WriteRecord(const std::string &recordName, const std::vector<double> &freqVals,
const std::vector<double> &yReals, const std::vector<double> &yImags) const;
void WriteOutputPoints() const;
NXOpen::Point* GetSlabCenter() const;
private:
// NOTE(review): reference member -- the caller's vector must outlive this
// object; confirm all construction sites keep the vector alive.
const std::vector<NXOpen::Point*> &m_outputPoints;
std::vector<NXOpen::TaggedObject*> m_refNodeSeq;  // reference node sequence
};
// An output request name paired with a boolean option (meaning defined at
// the use site in OpenOutputRequests).
typedef boost::tuple<std::string, bool> OutputRequestItem;
//////////////////////////////////////////////////////////////////////////
// Solver settings
// Applies the solver configuration for a solve: output groups for the
// selected elements/nodes, output requests, time step and job mode.
class SolveSettings
{
public:
// The bOutput* flags state whether the corresponding entity list is used.
SolveSettings(bool bOutputElems, const std::vector<NXOpen::TaggedObject*> &outputElems,
bool bOutputNodes, const std::vector<NXOpen::TaggedObject*> &outputNodes,
bool bOutputNodesForNoise);
~SolveSettings()
{
}
// Push all settings into the solution (entry point).
void Apply();
protected:
// Put the given entities into the named output group.
void SetEntityGroup(const std::string &groupName,
const std::vector<NXOpen::TaggedObject*> &outputEntities);
void SetRunJobInForeground();
void CheckConstraints();
void SetResponseOutput();
void SetNoiseOutput();
void SetTimeStep();
// Enable/disable the listed output requests on the named solver object.
void OpenOutputRequests(const std::string &oObjName, const std::vector<OutputRequestItem> &outputReqItems);
private:
bool m_bOutputElems;                              // output element results?
std::vector<NXOpen::TaggedObject*> m_outputElems; // elements to output
bool m_bOutputNodes;                              // output node results?
std::vector<NXOpen::TaggedObject*> m_outputNodes; // nodes to output
bool m_bOutputNodesForNoise;                      // output nodes for noise solve?
};
}
#endif //VSAR_SOLVEOPERATION_H_INCLUDED
<file_sep>/src/NXVsar/src/vsar/Vsar_Rail.cxx
#include <uf_defs.h>
#include <Vsar_Rail.hxx>
//#include <uf.h>
//#include <uf_ui.h>
//
//#include <boost/filesystem.hpp>
//#include <boost/scope_exit.hpp>
//
//#include <NXOpen/Session.hxx>
//#include <NXOpen/NXException.hxx>
//#include <NXOpen/PartCollection.hxx>
//#include <NXOpen/Part.hxx>
#include <Vsar_Project.hxx>
using namespace NXOpen;
using namespace Vsar;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Intentionally empty: no rail-specific definitions at present.
}
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\CAE\Response\VSAR-DEV\NXProject\AppRoot\application\SelmiInfinite.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: Joseph
// Version: NX 7.5
// Date: 05-08-2011 (Format: mm-dd-yyyy)
// Time: 14:03 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_SelmiInfiniteSettings.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <Vsar_Component.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_SelmiInfiniteBase.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Binds the "Selmi-Infinite" dialog layout file to its component model
// (the BaseCompDialog base takes ownership of the SelmiInfiniteBase).
SelmiInfiniteSettings::SelmiInfiniteSettings() : BaseCompDialog("Selmi-Infinite.dlx",
new SelmiInfiniteBase())
{
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
SelmiInfiniteSettings::~SelmiInfiniteSettings()
{
}
void SelmiInfiniteSettings::ShowDialog()
{
boost::scoped_ptr<SelmiInfiniteSettings> pRailSettingDlg(new SelmiInfiniteSettings());
try
{
// The following method shows the dialog immediately
pRailSettingDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
// Resolve the dialog's UI blocks by their IDs and cache them in members.
// Called by the Block Styler framework once the dialog is constructed.
void SelmiInfiniteSettings::InitializeCb()
{
BaseCompDialog::InitializeCb();
try
{
CompositeBlock *pTopBlock = m_theDialog->TopBlock();
//grpGeometry = pTopBlock->FindBlock("grpGeometry");
m_width = pTopBlock->FindBlock(WIDTH_ID_NAME);    // width expression block
m_height = pTopBlock->FindBlock(HEIGHT_ID_NAME);  // height expression block
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Called just before the dialog is shown; values set here take precedence
// over the stored ones. Currently a stub from the Block UI Styler template.
void SelmiInfiniteSettings::DialogShownCb()
{
try
{
//---- Enter your callback code here -----
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
//int SelmiInfinite::ApplyCb()
//{
// int errorCode = 0;
// try
// {
// //---- Enter your callback code here -----
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// errorCode = 1;
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return errorCode;
//}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Per-block update callback invoked when a dialog block changes.
// All branches are still empty template stubs; returns 0 (no error).
int SelmiInfiniteSettings::UpdateCb(UIBlock* block)
{
try
{
if(block == m_width)
{
//---------Enter your code here-----------
}
else if(block == m_height)
{
//---------Enter your code here-----------
}
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
return 0;
}
}
<file_sep>/src/NXVsar/include/Vsar_Solution.hxx
#ifndef VSAR_SOLUTION_H_INCLUDED
#define VSAR_SOLUTION_H_INCLUDED
#include <Vsar_Component.hxx>
namespace Vsar
{
// Abstract base class for VSAR solution components.
class BaseSolution : public BaseComponent
{
public:
virtual ~BaseSolution() = 0;
virtual void OnInit();
protected:
// Default construction: no component attribute table.
inline BaseSolution() : BaseComponent(NULL, 0)
{
}
// Construction with a component attribute table of compAttrCnt entries.
inline BaseSolution(const CompAttrInfo *pCompAttrs, int compAttrCnt) : BaseComponent(pCompAttrs, compAttrCnt)
{
}
// Update-dependency queries overridden from the component interface
// (return values defined in the .cxx, not visible here).
virtual bool HasGeometryDependency() const;
virtual bool CanUpdateRailSlabFEModel() const;
virtual bool CanUpdateBraseFEModel() const;
virtual bool CanUpdateRailSlabConnection() const;
virtual bool CanUpdateBraseConnection() const;
private:
};
// Solution component for the response analysis.
class ResponseSolution : public BaseSolution
{
public:
ResponseSolution();
virtual ~ResponseSolution();
};
// Solution component for the noise analysis.
class NoiseSolution : public BaseSolution
{
public:
NoiseSolution();
virtual ~NoiseSolution();
};
}
#endif //VSAR_SOLUTION_H_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Rail.hxx
#ifndef VSAR_RAIL_H_INCLUDED
#define VSAR_RAIL_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <string>
//#include <boost/scoped_ptr.hpp>
namespace Vsar
{
// Intentionally empty: no rail-specific declarations at present.
}
#endif //VSAR_RAIL_H_INCLUDED
<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/include/NXVsdaneBridge.hxx
// NXDotNetWrap.h
#ifndef VSDANE_NXVSDANEBRIDGE_HXX_INCLUDED
#define VSDANE_NXVSDANEBRIDGE_HXX_INCLUDED
#pragma once
#pragma unmanaged
//#using <NXOpen.dll>
//using namespace System;
#include <string>
//#include <uf_defs.h>
//namespace NXOpen
//{
// class Expression;
//
// namespace CAE
// {
// class IFEModel;
// class CAEFace;
// }
//}
#pragma managed
#include <vcclr.h>
#include "NXVsdaneDotNet.hxx"
// Local stand-in for the NX object tag type, so this managed header does not
// need to pull in the unmanaged UF headers.
// NOTE(review): must stay layout-compatible with tag_t in uf_defs.h -- confirm.
typedef unsigned int tag_t;
namespace Vsdane
{
// Bridge between unmanaged C++ callers and the managed NXVsdane class
// (C++/CLI). Forwards swept-mesh creation and converts between unmanaged
// pointers/tags and their managed NXOpen counterparts.
class NXVsdaneBridge
{
public:
NXVsdaneBridge();
//static void CreateSweptMesh(void *pFeModel,
//    const std::string &meshColName, const std::string &meshName,
//    void *pSrcFace, void *pTargetFace, void *pEleSize);
// Create a swept hex mesh, identifying all NX objects by tag.
void CreateSweptMesh(tag_t tFeModel,
const std::string &meshColName, const std::string &meshName,
tag_t tSrcFace, tag_t tTargetFace, tag_t tpEleSize);
// Convert a C string to a managed System::String.
static System::String^ UnmanagedString2ManagedString(const char* pIn);
//generic<typename T>
//static T^ UnmanagedPtr2ManagedType(void *);
// Wrap raw NXOpen object pointers into their managed handle types.
static NXOpen::CAE::IFEModel^ UnmanagedPtr2ManagedIFEModel(void *);
static NXOpen::CAE::CAEFace^ UnmanagedPtr2ManagedCAEFace(void *);
static NXOpen::Expression^ UnmanagedPtr2ManagedExpression(void *);
private:
// gcroot lets this unmanaged class hold a handle to the managed worker.
gcroot<NXVsdane^> m_impl;
};
}
#endif //VSDANE_NXVSDANEBRIDGE_HXX_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_SelmiInfiniteBase.hxx
#ifndef VSAR_SELMIINFINITEBASE_H_INCLUDED
#define VSAR_SELMIINFINITEBASE_H_INCLUDED
#include <Vsar_Component.hxx>
namespace Vsar
{
// Component model for the Selmi-Infinite structure settings.
class SelmiInfiniteBase : public BaseComponent
{
public:
SelmiInfiniteBase();
~SelmiInfiniteBase();
// Capture the initial state (e.g. the current height) when editing starts.
virtual void OnInit();
protected:
// Update-dependency queries (return values defined in the .cxx).
virtual bool CanUpdateRailSlabFEModel() const;
virtual bool CanUpdateBraseFEModel() const;
virtual bool CanUpdateRailSlabConnection() const;
virtual bool CanUpdateBraseConnection() const;
private:
// Read the current height value from the component's expression.
double GetHeight() const;
private:
// Height captured at OnInit, used to detect changes.
double m_oldHeight;
};
}
#endif //VSAR_SELMIINFINITEBASE_H_INCLUDED
<file_sep>/src/NXVsar/src/vsar/Vsar_Bridge.cxx
#include <uf_defs.h>
#include <Vsar_Bridge.hxx>
//#include <boost/cast.hpp>
#include <NXOpen/Expression.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace NXOpen;
using namespace boost;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Mapping from dialog block IDs to (part, expression) pairs for the bridge
// component. NOTE: Bridge::GetHeight/GetSpanCount index this table by
// position (entries 1 and 2), so the order below is significant.
static CompAttrInfo attrExpInfo[] =
{
{WIDTH_ID_NAME, BEAM_PRT_PART_NAME, WIDTH_EXP_NAME},
{HEIGHT_ID_NAME, BEAM_PRT_PART_NAME, HEIGHT_EXP_NAME},
{SPAN_COUNT_ID_NAME, BRIDGE_PRT_PART_NAME, SPAN_COUNT_EXP_NAME},
{ELASTIC_MODULUS_ID_NAME, BRIDGE_FEM_PART_NAME, BRIDGE_ELASTIC_MODULUS_EXP_NAME},
{BRIDGE_MASS_DENSITY_ID_NAME, BRIDGE_FEM_PART_NAME, BRIDGE_MASS_DENSITY_EXP_NAME},
{POISSON_RATIO_ID_NAME, BRIDGE_FEM_PART_NAME, BRIDGE_POISSON_RATIO_EXP_NAME}
};
// Construct the bridge component with its attribute/expression table.
// The m_old* members are later refreshed in OnInit; the literals here are
// only placeholders until then.
Bridge::Bridge() : BaseComponent(attrExpInfo, N_ELEMENTS(attrExpInfo)),
m_oldHeight(1), m_oldSpanCount(2)
{
}
Bridge::~Bridge()
{
}
// Snapshot the current height and span count so the CanUpdate* queries can
// detect whether the user changed them.
void Bridge::OnInit()
{
m_oldHeight = GetHeight();
m_oldSpanCount = GetSpanCount();
}
int Bridge::GetSpanCount() const
{
Expression *pSupportCntExp = NULL;
if (!m_compAttrs.empty())
pSupportCntExp = GetExpression(m_compAttrs[2].m_partName, m_compAttrs[2].m_expName);
return pSupportCntExp ? pSupportCntExp->IntegerValue() : 0;
}
double Bridge::GetHeight() const
{
Expression *pSupportCntExp = NULL;
if (!m_compAttrs.empty())
pSupportCntExp = GetExpression(m_compAttrs[1].m_partName, m_compAttrs[1].m_expName);
return pSupportCntExp ? pSupportCntExp->Value() : 0;
}
// The bridge geometry does not affect the rail/slab FE model.
bool Bridge::CanUpdateRailSlabFEModel() const
{
return false;
}
// The brase FE model must be rebuilt when height or span count changed.
// NOTE(review): exact '!=' comparison on a double -- intentional here, since
// any change to the expression value should trigger an update.
bool Bridge::CanUpdateBraseFEModel() const
{
return (GetHeight() != m_oldHeight) || (GetSpanCount() != m_oldSpanCount);
}
// The bridge geometry does not affect the rail/slab connection.
bool Bridge::CanUpdateRailSlabConnection() const
{
return false;
}
// The brase connection depends only on the span count.
bool Bridge::CanUpdateBraseConnection() const
{
return GetSpanCount() != m_oldSpanCount;
}
}<file_sep>/src/NXVsar/src/vsar/Vsar_Tunnel.cxx
#include <uf_defs.h>
#include <Vsar_Tunnel.hxx>
//#include <boost/cast.hpp>
//#include <NXOpen/Expression.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Init_Utils.hxx>
using namespace NXOpen;
using namespace boost;
//------------------------------------------------------------------------------
// Declaration of global variables
//------------------------------------------------------------------------------
namespace Vsar
{
// Mapping from dialog block IDs to (part, expression) pairs for the tunnel
// component's geometry parameters.
static CompAttrInfo attrExpInfo[] =
{
{DIAMETER_ID_NAME, TUNNEL_PRT_PART_NAME, DIAMETER_EXP_NAME},
{WIDTH_ID_NAME, TUNNEL_PRT_PART_NAME, WIDTH_EXP_NAME},
{TUNNEL_H1_ID_NAME, TUNNEL_PRT_PART_NAME, TUNNEL_H1_EXP_NAME},
{TUNNEL_H2_ID_NAME, TUNNEL_PRT_PART_NAME, TUNNEL_H2_EXP_NAME},
{TUNNEL_H3_ID_NAME, TUNNEL_PRT_PART_NAME, TUNNEL_H3_EXP_NAME}
};
// Construct the tunnel component with its attribute/expression table.
Tunnel::Tunnel() : BaseComponent(attrExpInfo, N_ELEMENTS(attrExpInfo))
{
}
Tunnel::~Tunnel()
{
}
// No initial state needs to be captured: the tunnel's update queries below
// do not compare against previous values.
void Tunnel::OnInit()
{
}
// The tunnel geometry does not affect the rail/slab FE model.
bool Tunnel::CanUpdateRailSlabFEModel() const
{
return false;
}
// The brase FE model is always updated after editing the tunnel.
bool Tunnel::CanUpdateBraseFEModel() const
{
return true;
}
// The tunnel geometry does not affect the rail/slab connection.
bool Tunnel::CanUpdateRailSlabConnection() const
{
return false;
}
// The brase connection is always updated after editing the tunnel.
bool Tunnel::CanUpdateBraseConnection() const
{
return true;
}
}<file_sep>/src/NXVsar/src/vsarint/VsarUI_TunnelSettings.cxx
//==============================================================================
// WARNING!! This file is overwritten by the Block UI Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\CAE\Response\VSAR-DEV\NXProject\AppRoot\application\Tunnel.cpp
//
// This file was generated by the NX Block UI Styler
// Created by: Joseph
// Version: NX 7.5
// Date: 05-08-2011 (Format: mm-dd-yyyy)
// Time: 14:04 (Format: hh-mm)
//
//==============================================================================
//==============================================================================
// Purpose: This TEMPLATE file contains C++ source to guide you in the
// construction of your Block application dialog. The generation of your
// dialog file (.dlx extension) is the first step towards dialog construction
// within NX. You must now create a NX Open application that
// utilizes this file (.dlx).
//
// The information in this file provides you with the following:
//
// 1. Help on how to load and display your Block UI Styler dialog in NX
// using APIs provided in NXOpen.BlockStyler namespace
// 2. The empty callback methods (stubs) associated with your dialog items
// have also been placed in this file. These empty methods have been
// created simply to start you along with your coding requirements.
// The method name, argument list and possible return values have already
// been provided for you.
//==============================================================================
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <uf_defs.h>
#include <VsarUI_TunnelSettings.hxx>
#include <NXOpen/UI.hxx>
#include <NXOpen/NXMessageBox.hxx>
#include <Vsar_Component.hxx>
#include <Vsar_Init_Utils.hxx>
#include <Vsar_Names.hxx>
#include <Vsar_Tunnel.hxx>
using namespace NXOpen;
using namespace NXOpen::BlockStyler;
using namespace Vsar;
namespace VsarUI
{
//------------------------------------------------------------------------------
// Constructor for NX Styler class
//------------------------------------------------------------------------------
// Binds the "Tunnel" dialog layout file to its component model
// (the BaseCompDialog base takes ownership of the Tunnel component).
TunnelSettings::TunnelSettings() : BaseCompDialog("Tunnel.dlx",
new Tunnel())
{
}
//------------------------------------------------------------------------------
// Destructor for NX Styler class
//------------------------------------------------------------------------------
TunnelSettings::~TunnelSettings()
{
}
void TunnelSettings::ShowDialog()
{
boost::scoped_ptr<TunnelSettings> pRailSettingDlg(new TunnelSettings());
try
{
// The following method shows the dialog immediately
pRailSettingDlg->Show(BlockDialog::DialogModeEdit);
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//---------------------Block UI Styler Callback Functions--------------------------
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
//Callback Name: initialize_cb
//------------------------------------------------------------------------------
// Resolve the dialog's UI blocks by their IDs and cache them in members.
// Called by the Block Styler framework once the dialog is constructed.
void TunnelSettings::InitializeCb()
{
BaseCompDialog::InitializeCb();
try
{
CompositeBlock *pTopBlock = m_theDialog->TopBlock();
//grpGeometry = pTopBlock->FindBlock("grpGeometry");
m_image = pTopBlock->FindBlock("image");            // illustration label
m_diameter = pTopBlock->FindBlock(DIAMETER_ID_NAME); // tunnel diameter
m_width = pTopBlock->FindBlock(WIDTH_ID_NAME);       // tunnel width
m_h1 = pTopBlock->FindBlock(TUNNEL_H1_ID_NAME);      // section height H1
m_h2 = pTopBlock->FindBlock(TUNNEL_H2_ID_NAME);      // section height H2
m_h3 = pTopBlock->FindBlock(TUNNEL_H3_ID_NAME);      // section height H3
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: dialogShown_cb
//This callback is executed just before the dialog launch. Thus any value set
//here will take precedence and dialog will be launched showing that value.
//------------------------------------------------------------------------------
// Called just before the dialog is shown; values set here take precedence
// over the stored ones. Currently a stub from the Block UI Styler template.
void TunnelSettings::DialogShownCb()
{
try
{
//---- Enter your callback code here -----
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
}
//------------------------------------------------------------------------------
//Callback Name: apply_cb
//------------------------------------------------------------------------------
//int Tunnel::ApplyCb()
//{
// int errorCode = 0;
// try
// {
// //---- Enter your callback code here -----
// }
// catch(std::exception& ex)
// {
// //---- Enter your exception handling code here -----
// errorCode = 1;
// theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
// }
// return errorCode;
//}
//------------------------------------------------------------------------------
//Callback Name: update_cb
//------------------------------------------------------------------------------
// Per-block update callback invoked when a dialog block changes.
// All branches are still empty template stubs; returns 0 (no error).
int TunnelSettings::UpdateCb(UIBlock* block)
{
try
{
if(block == m_image)
{
//---------Enter your code here-----------
}
else if(block == m_diameter)
{
//---------Enter your code here-----------
}
else if(block == m_width)
{
//---------Enter your code here-----------
}
else if(block == m_h1)
{
//---------Enter your code here-----------
}
else if(block == m_h2)
{
//---------Enter your code here-----------
}
else if(block == m_h3)
{
//---------Enter your code here-----------
}
}
catch(std::exception& ex)
{
//---- Enter your exception handling code here -----
s_theUI->NXMessageBox()->Show("Block Styler", NXMessageBox::DialogTypeError, ex.what());
}
return 0;
}
}
<file_sep>/src/NXVsar/include/VsarUI_TunnelSettings.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\CAE\Response\VSAR-DEV\NXProject\AppRoot\application\Tunnel.hpp
//
// This file was generated by the NX Block Styler
// Created by: Joseph
// Version: NX 7.5
// Date: 05-08-2011 (Format: mm-dd-yyyy)
// Time: 14:04
//
//==============================================================================
#ifndef VSARUI_TUNNELSETTINGS_H_INCLUDED
#define VSARUI_TUNNELSETTINGS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace VsarUI
{
// Block Styler dialog for editing the tunnel geometry settings.
class TunnelSettings : public BaseCompDialog
{
// class members
public:
TunnelSettings();
~TunnelSettings();
// Create the dialog and show it (see .cxx).
static void ShowDialog();
//----------------------- BlockStyler Callback Prototypes ---------------------
// The following member function prototypes define the callbacks
// specified in your BlockStyler dialog. The empty implementation
// of these prototypes is provided in the Tunnel.cpp file.
// You are REQUIRED to write the implementation for these functions.
//------------------------------------------------------------------------------
virtual void InitializeCb();
virtual void DialogShownCb();
//int ApplyCb();
virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
private:
// Cached UI blocks, resolved in InitializeCb.
//NXOpen::BlockStyler::UIBlock* grpGeometry;// Block type: Group
NXOpen::BlockStyler::UIBlock* m_image;// Block type: Label
NXOpen::BlockStyler::UIBlock* m_diameter;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_width;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_h1;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_h2;// Block type: Expression
NXOpen::BlockStyler::UIBlock* m_h3;// Block type: Expression
};
}
#endif //VSARUI_TUNNELSETTINGS_H_INCLUDED
<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/src/NXVsdaneDotNet.cxx
// This is the main DLL file.
#include "NXVsdaneDotNet.hxx"
using namespace System;
using namespace NXOpen;
using namespace NXOpen::CAE;
namespace Vsdane
{
// Create a swept CHEXA(20) hex mesh between a source and a target face
// using the NXOpen Mesh3dHexBuilder (C++/CLI).
//  pFeModel    - FE model that owns the mesh manager
//  meshColName - name of an EXISTING mesh collector to place the mesh into
//  meshName    - base name for the created mesh(es); "_<i>" is appended
//  pSrcFace    - source face of the sweep
//  pTargetFace - target face of the sweep
//  pEleSize    - expression supplying the source element size
// The whole operation runs under an invisible undo mark that is deleted on
// exit, and the builder is always destroyed (finally block).
void NXVsdane::CreateSweptMeshDotNet(IFEModel ^pFeModel,
String^ meshColName, String ^meshName,
CAEFace ^pSrcFace, CAEFace ^pTargetFace, Expression ^pEleSize)
{
Session ^pSession = Session::GetSession();
MeshManager ^pMeshMgr = safe_cast<MeshManager^>(pFeModel->MeshManager);
Mesh3dHexBuilder ^pMesh3dHexBuilder = pMeshMgr->CreateMesh3dHexBuilder(nullptr);
Session::UndoMarkId undoMark = pSession->SetUndoMark(Session::MarkVisibility::Invisible, "Create Swept Mesh");
try
{
// Locate the target mesh collector by its journaling identifier.
String ^meshColFullName = "MeshCollector[" + meshColName + "]";
MeshCollector ^meshCol(safe_cast<MeshCollector^>(pMeshMgr->FindObject(meshColFullName)));
// Configure a 20-node swept-solid hex element type routed into meshCol.
ElementTypeBuilder ^pEleTypeBuilder = pMesh3dHexBuilder->ElementType;
pEleTypeBuilder->ElementDimension = ElementTypeBuilder::ElementType::SweepSolid;
pEleTypeBuilder->ElementTypeName = "CHEXA(20)";
DestinationCollectorBuilder ^pDstColBulder = pEleTypeBuilder->DestinationCollector;
pDstColBulder->ElementContainer = meshCol;
pDstColBulder->AutomaticMode = false;
pMesh3dHexBuilder->CreationType = Mesh3dHexBuilder::Type::Manual;
//DisplayableObject^[] srcFaces(1, pSrcFace);
// Define the sweep: one source face, one target face.
bool bSrcFaceAdded = pMesh3dHexBuilder->SourceFaceList->Add(pSrcFace);
pMesh3dHexBuilder->TargetFace->Value = pTargetFace;
// Meshing options: free (non-mapped) mesh, no vertex projection/smoothing.
PropertyTable ^pPropTable = pMesh3dHexBuilder->PropertyTable;
pPropTable->SetBooleanPropertyValue("mapped mesh option bool", false);
pPropTable->SetIntegerPropertyValue("quad only option", 0);
pPropTable->SetBooleanPropertyValue("project vertices option", false);
pPropTable->SetBooleanPropertyValue("target face smoothing option", false);
// set element size
// Copy the caller's element-size formula into the builder's expression.
Expression ^pEleSizeExp = pPropTable->GetScalarPropertyValue("source element size");
pEleSizeExp->RightHandSide = pEleSize->RightHandSide;
// pPropTable->SetScalarPropertyValue("source element size", pEleSizeExp);
// Update the model so the size expression takes effect before meshing.
int nErrs = pSession->UpdateManager->DoUpdate(undoMark);
// NOTE(review): MSVC defines _DEBUG, not DEBUG, in debug builds -- confirm
// this macro is actually set by the project's build settings.
#ifdef DEBUG
if (nErrs > 0)
{
// Dump any update errors to the listing window for diagnosis.
ListingWindow ^pLstWnd = pSession->ListingWindow;
pLstWnd->Open();
pLstWnd->WriteLine("\n");
ErrorList ^pErrLst = pSession->UpdateManager->ErrorList;
for (int idx = 0; idx < nErrs; idx++)
{
pLstWnd->WriteLine(pErrLst->GetErrorInfo(idx)->ErrorCode.ToString());
pLstWnd->WriteLine(pErrLst->GetErrorInfo(idx)->Description);
pLstWnd->WriteLine(pErrLst->GetErrorInfo(idx)->ErrorObjectDescription);
pLstWnd->WriteLine("\n");
}
pErrLst->Clear();
}
#endif
// Commit and name the created meshes meshName_1, meshName_2, ...
array<CAE::Mesh^>^ createdMeshes = pMesh3dHexBuilder->CommitMesh();
int idx = 0;
for each(Mesh^ swepMesh in createdMeshes)
swepMesh->SetName(meshName + "_" + ++idx);
}
finally
{
// Always release the builder and drop the temporary undo mark.
pMesh3dHexBuilder->Destroy();
pSession->DeleteUndoMark(undoMark, nullptr);
}
}
}<file_sep>/src/VsarInit/include/libvsarinit_exports.h
/*HEAD LIBVSARINIT_EXPORTS HHH VSARINIT */
/*=============================================================================
File description:
This contains the export symbols for the LIBVSARINIT library.
Documentation at http://cipgweb/do/devops_nx:relnotes:autogenerated_export_headers
Autogenerated by //nx/nx8/build/tools/be/GenerateExportsHeaders.pm#3
==============================================================================*/
#ifndef LIBVSARINIT_EXPORTS_H_INCLUDED
#define LIBVSARINIT_EXPORTS_H_INCLUDED
#ifdef USE_PRAGMA_ONCE
#pragma once
#endif
//#if IPLIB==10
#if defined (LIBVSARINIT)
/* Building the library itself: export the symbols. */
# if defined(_WIN32) || defined(_WIN64)
# define VSARINITEXPORT __declspec(dllexport)
# define VSARINITGLOBAL extern __declspec(dllexport)
# elif __GNUC__ >= 4
# define VSARINITEXPORT __attribute__ ((visibility("default")))
# define VSARINITGLOBAL extern __attribute__ ((visibility("default")))
# else
# define VSARINITEXPORT
# define VSARINITGLOBAL extern
# endif
#else
/* Consuming the library: import the symbols on Windows unless linking
   statically. BUGFIX: the parentheses below are required -- '&&' binds
   tighter than '||' in #if expressions, so the original unparenthesized
   test "_WIN32 || _WIN64 && !WNT_STATIC_LINK" ignored WNT_STATIC_LINK on
   32-bit builds and wrongly declared dllimport for static links. */
# if (defined(_WIN32) || defined(_WIN64)) && !defined(WNT_STATIC_LINK)
# define VSARINITEXPORT __declspec(dllimport)
# define VSARINITGLOBAL extern __declspec(dllimport)
# else
# define VSARINITEXPORT
# define VSARINITGLOBAL extern
# endif
#endif
#endif /* LIBVSARINIT_EXPORTS_H_INCLUDED */
<file_sep>/src/NXVsar/include/VsarUI_SolveNoise.hxx
//==============================================================================
// WARNING!! This file is overwritten by the Block Styler while generating
// the automation code. Any modifications to this file will be lost after
// generating the code again.
//
// Filename: E:\Project\Response\VSAR\NXProject\AppRoot\application\Solve.hpp
//
// This file was generated by the NX Block Styler
// Created by: wujif
// Version: NX 7.5
// Date: 05-23-2011 (Format: mm-dd-yyyy)
// Time: 20:48
//
//==============================================================================
#ifndef VSARUI_SOLVENOISE_H_INCLUDED
#define VSARUI_SOLVENOISE_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <VsarUI_BaseCompDialog.hxx>
namespace NXOpen
{
class TaggedObject;
}
//------------------------------------------------------------------------------
// Forward declaration for Class
//------------------------------------------------------------------------------
namespace VsarUI
{
//------------------------------------------------------------------------------
//Bit Option for Property: SnapPointTypesEnabled
//------------------------------------------------------------------------------
//#define SnapPointTypesEnabled_UserDefined (1 << 0);
//#define SnapPointTypesEnabled_Inferred (1 << 1);
//#define SnapPointTypesEnabled_ScreenPosition (1 << 2);
//#define SnapPointTypesEnabled_EndPoint (1 << 3);
//#define SnapPointTypesEnabled_MidPoint (1 << 4);
//#define SnapPointTypesEnabled_ControlPoint (1 << 5);
//#define SnapPointTypesEnabled_Intersection (1 << 6);
//#define SnapPointTypesEnabled_ArcCenter (1 << 7);
//#define SnapPointTypesEnabled_QuadrantPoint (1 << 8);
//#define SnapPointTypesEnabled_ExistingPoint (1 << 9);
//#define SnapPointTypesEnabled_PointonCurve (1 <<10);
//#define SnapPointTypesEnabled_PointonSurface (1 <<11);
//#define SnapPointTypesEnabled_PointConstructor (1 <<12);
//#define SnapPointTypesEnabled_TwocurveIntersection (1 <<13);
//#define SnapPointTypesEnabled_TangentPoint (1 <<14);
//#define SnapPointTypesEnabled_Poles (1 <<15);
//#define SnapPointTypesEnabled_BoundedGridPoint (1 <<16);
//------------------------------------------------------------------------------
//Bit Option for Property: SnapPointTypesOnByDefault
//------------------------------------------------------------------------------
//#define SnapPointTypesOnByDefault_UserDefined (1 << 0);
//#define SnapPointTypesOnByDefault_Inferred (1 << 1);
//#define SnapPointTypesOnByDefault_ScreenPosition (1 << 2);
//#define SnapPointTypesOnByDefault_EndPoint (1 << 3);
//#define SnapPointTypesOnByDefault_MidPoint (1 << 4);
//#define SnapPointTypesOnByDefault_ControlPoint (1 << 5);
//#define SnapPointTypesOnByDefault_Intersection (1 << 6);
//#define SnapPointTypesOnByDefault_ArcCenter (1 << 7);
//#define SnapPointTypesOnByDefault_QuadrantPoint (1 << 8);
//#define SnapPointTypesOnByDefault_ExistingPoint (1 << 9);
//#define SnapPointTypesOnByDefault_PointonCurve (1 <<10);
//#define SnapPointTypesOnByDefault_PointonSurface (1 <<11);
//#define SnapPointTypesOnByDefault_PointConstructor (1 <<12);
//#define SnapPointTypesOnByDefault_TwocurveIntersection (1 <<13);
//#define SnapPointTypesOnByDefault_TangentPoint (1 <<14);
//#define SnapPointTypesOnByDefault_Poles (1 <<15);
//#define SnapPointTypesOnByDefault_BoundedGridPoint (1 <<16);
//class SelectPoint : public BaseDialog
//{
//public:
// SelectPoint();
// ~SelectPoint();
// virtual void InitializeCb();
// virtual int ApplyCb();
// virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
//private:
// NXOpen::BlockStyler::UIBlock* m_selectPoint;// Block type: Specify Point
//};
    // Block Styler dialog class for the "Solve Noise" command.  Only the
    // declaration lives here; the callback implementations are in the
    // corresponding .cpp file.
    class SolveNoise : public BaseCompDialog
    {
    public:
        // class members
    public:
        SolveNoise();
        ~SolveNoise();
        // Entry point: construct the dialog and display it.
        static void ShowDialog();
    //----------------------- BlockStyler Callback Prototypes ---------------------
    // The following member function prototypes define the callbacks
    // specified in your BlockStyler dialog. The empty implementation
    // of these prototypes is provided in the Solve.cpp file.
    // You are REQUIRED to write the implementation for these functions.
    //------------------------------------------------------------------------------
        virtual void InitializeCb();
        virtual void DialogShownCb();
        virtual int ApplyCb();
        // Selection filter: decides whether *pSel may be selected in *pBlock.
        virtual int FilterCb(NXOpen::BlockStyler::UIBlock *pBlock, NXOpen::TaggedObject *pSel);
        // Called whenever a dialog block's value changes.
        virtual int UpdateCb(NXOpen::BlockStyler::UIBlock* block);
    protected:
        //bool CanOutputElements() const;
        //std::vector<NXOpen::TaggedObject*> GetOutputElements() const;
        //bool CanOutputNodes() const;
        //std::vector<NXOpen::TaggedObject*> GetOutputNodes() const;
        //bool CanOutputNodesForNoise() const;
    private:
        //NXOpen::BlockStyler::UIBlock* m_outputPointList;// Block type: Select Elements
        NXOpen::BlockStyler::UIBlock* m_selectPoints;// Block type: Select Elements
        //NXOpen::BlockStyler::UIBlock* m_specifyPoint;// Block type: Select Elements
    };
}
#endif //VSARUI_SOLVENOISE_H_INCLUDED
<file_sep>/src/NXVsdaneWrap/NXVsdaneWrap/src/NXVsdane.cxx
// This is the main DLL file.
#pragma managed
#include "NXVsdaneBridge.hxx"
#pragma unmanaged
#include "NXVsdane.hxx"
//#include <NXOpen/Expression.hxx>
//#include <NXOpen/CAE_IFEModel.hxx>
//#include <NXOpen/CAE_CAEFace.hxx>
//using namespace NXOpen;
//using namespace NXOpen::CAE;
namespace Vsdane
{
//void CreateSweptMesh(IFEModel *pFeModel,
// const std::string &meshColName, const std::string &meshName,
// CAEFace* pSrcFace, CAEFace *pTargetFace, Expression *pEleSize)
//{
// NXVsdaneBridge::CreateSweptMesh(pFeModel, meshName, meshColName, pSrcFace, pTargetFace, pEleSize);
//}
//void CreateSweptMesh(void *pFeModel,
// const std::string &meshColName, const std::string &meshName,
// void* pSrcFace, void *pTargetFace, void *pEleSize)
//{
// NXVsdaneBridge::CreateSweptMesh(pFeModel, meshName, meshColName, pSrcFace, pTargetFace, pEleSize);
//}
    // Unmanaged-to-managed shim: forwards a swept-mesh creation request to
    // the managed NXVsdaneBridge using raw NX object tags.
    //
    // NOTE(review): the wrapper's parameter order is (meshColName, meshName)
    // but the bridge call below passes (meshName, meshColName).  The
    // commented-out overloads above show the same swap, so the bridge
    // presumably takes the names in that order -- confirm against
    // NXVsdaneBridge::CreateSweptMesh before changing anything.
    void CreateSweptMesh(tag_t tFeModel,
        const std::string &meshColName, const std::string &meshName,
        tag_t tSrcFace, tag_t tTargetFace, tag_t tEleSize)
    {
        NXVsdaneBridge bridge;
        bridge.CreateSweptMesh(tFeModel, meshName, meshColName, tSrcFace, tTargetFace, tEleSize);
    }
}<file_sep>/src/NXVsar/include/Vsar_Utils.hxx
#ifndef VSAR_UTILS_H_INCLUDED
#define VSAR_UTILS_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
#include <string>
#include <vector>
namespace NXOpen
{
class BasePart;
class Part;
class Body;
class TaggedObject;
class DisplayableObject;
class Expression;
namespace Assemblies
{
class Component;
}
namespace CAE
{
class IFEModel;
class BaseFEModel;
class BaseFemPart;
class MeshManager;
class MeshCollector;
class CAEBody;
class CAEFace;
class FEModelOccurrence;
class IHierarchicalFEModel;
class Mesh;
class FENode;
class CaePart;
}
}
namespace Vsar
{
    //------------------------------------------------------------------------------
    // Declaration of global variables
    //------------------------------------------------------------------------------
    //extern NewProject *theVsar_NewProject;
    // Report a failed NX Open call (source file/line, the call's text, and
    // its return code irc); presumably passes irc back through -- see the
    // implementation for details.
    int ReportError(const std::string &file, int line, const std::string &call, int irc);
#ifndef UF_CALL
// Wrap an NX Open API call: the stringized expression and its return code
// are handed to ReportError together with the call site's file/line.
#define UF_CALL(X) (ReportError( __FILE__, __LINE__, #X, (X)))
#endif
    void LoadRootPart();
std::vector<NXOpen::Body*> GetBodyByName(NXOpen::Part *pBodyPrt, const std::string &bodyName);
std::vector<NXOpen::TaggedObject*> GetPointByAttrName( NXOpen::BasePart * pPtPrt, const std::string &ptName );
std::vector<NXOpen::TaggedObject*> GetPointByLayer( NXOpen::BasePart * pPtPrt, int layer );
std::vector<NXOpen::Assemblies::Component*> GetOccInCompTree(NXOpen::Assemblies::Component *pAssemTree, NXOpen::Part *pPrt);
std::vector<NXOpen::CAE::CAEBody*> GetCaeBodies(const std::vector<NXOpen::Body*> &bodies);
std::vector<NXOpen::CAE::CAEFace*> GetCaeFaceByName(NXOpen::CAE::CAEBody *pCaeBody, const std::string &faceName);
std::vector<NXOpen::CAE::CAEFace*> GetCaeFaceByName(NXOpen::CAE::CaePart *pPrt, const std::string &faceName);
std::vector<NXOpen::CAE::CAEBody*> GetCaeBodyByName(NXOpen::CAE::CaePart *pPrt, const std::string &bodyName);
std::vector<NXOpen::CAE::CAEFace*> GetCaeFacesOfBodyByName(NXOpen::CAE::CaePart *pPrt, const std::string &bodyName, const std::string &faceName);
void EditSweptMeshData(NXOpen::CAE::IFEModel *pFeModel, const std::string &meshName,
const std::vector<NXOpen::Body*> &srcBodies);
void CreateSweptMesh(NXOpen::CAE::MeshManager *pMeshMgr, NXOpen::CAE::MeshCollector *pMeshCol,
const std::string &meshName, NXOpen::CAE::CAEFace* pSrcFace, NXOpen::CAE::CAEFace *pTargetFace,
const std::string &eleSizeExpName);
void CreateSweptMesh_sf(NXOpen::CAE::IFEModel *pFeModel, NXOpen::CAE::CAEBody *pSolidBody,
const std::string &meshColName, const std::string &meshName,
NXOpen::CAE::CAEFace* pSrcFace, NXOpen::Expression *pEleSize);
void Update1DConnection(NXOpen::CAE::BaseFEModel *pFeModel,
const std::vector<NXOpen::TaggedObject*> &railConnectPts,
const std::vector<NXOpen::TaggedObject*> &slabConnectPts,
const std::string &connName,
const std::string &connColName,
const std::string &meshName);
NXOpen::CAE::FEModelOccurrence* GetFEModelOccByMeshName(NXOpen::CAE::IHierarchicalFEModel *pHieFeModel,
const std::string &meshName);
NXOpen::CAE::Mesh* GetMeshByName(NXOpen::CAE::IFEModel *pFEModel,
const std::string &meshNamePattern, const std::string &meshName);
void DeleteMeshesInCollector(NXOpen::CAE::IFEModel *pFeModel, const std::string &meshColName);
std::vector<NXOpen::CAE::Mesh*> GetMeshesInCollector(NXOpen::CAE::IFEModel *pFeModel,
const std::string &meshNamePattern, const std::string &meshColName);
std::vector<NXOpen::CAE::FENode*> GetNodesOnFace(NXOpen::CAE::CaePart *pCaePrt, const std::vector<NXOpen::CAE::CAEFace*> &pFaces);
int GetNodeOffset(NXOpen::CAE::FEModelOccurrence *pFeModelOcc);
std::vector<NXOpen::CAE::FENode*> GetNodeOcc(NXOpen::CAE::FEModelOccurrence *pFeModelOcc,
int nodeOffset,
const std::vector<NXOpen::CAE::FENode*> &nodeProtos);
NXOpen::CAE::Mesh* GetMesh(NXOpen::TaggedObject *pNode);
NXOpen::CAE::FEModelOccurrence* GetFEModelOccOfNode(NXOpen::CAE::FEModelOccurrence *pParentFEModel, NXOpen::CAE::FENode *pNodeProto);
std::string GetNXVersion();
}
//#if defined(__MSVC_RUNTIME_CHECKS)
//#undef __MSVC_RUNTIME_CHECKS
//#endif
//#if !defined(_DEBUG)
//#define _DEBUG
//#endif
#endif //VSAR_UTILS_H_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Slab.hxx
#ifndef VSAR_SLAB_H_INCLUDED
#define VSAR_SLAB_H_INCLUDED
#include <Vsar_Component.hxx>
namespace Vsar
{
    // "Slab" component of the VSAR model.  Tracks the slab support count
    // (see GetSupportCount / m_oldSupportCount) and reports which FE models
    // and connections need updating.
    class Slab : public BaseComponent
    {
    public:
        Slab();
        ~Slab();
        // Initialization hook (overrides BaseComponent).
        virtual void OnInit();
    protected:
        // Update predicates queried by the framework; presumably compare the
        // current support count with m_oldSupportCount -- confirm in the .cpp.
        virtual bool CanUpdateRailSlabFEModel() const;
        virtual bool CanUpdateBraseFEModel() const;
        virtual bool CanUpdateRailSlabConnection() const;
        virtual bool CanUpdateBraseConnection() const;
    private:
        int GetSupportCount() const;
    private:
        int m_oldSupportCount;  // support count from the previous update cycle
    };
}
#endif //VSAR_SLAB_H_INCLUDED
<file_sep>/src/NXVsar/include/Vsar_Train.hxx
#ifndef VSAR_TRAIN_H_INCLUDED
#define VSAR_TRAIN_H_INCLUDED
#include <Vsar_Component.hxx>
//namespace NXOpen
//{
// class Body;
//
// namespace CAE
// {
// class FemPart;
// }
//}
namespace Vsar
{
    // "Train" component of the VSAR model.  Tracks the carriage count
    // (see GetCarriageCount / m_oldCarriageCount) and reports which FE
    // models and connections need updating.
    class Train : public BaseComponent
    {
    public:
        Train();
        ~Train();
        // Initialization hook (overrides BaseComponent).
        virtual void OnInit();
    protected:
        // Update predicates queried by the framework; presumably compare the
        // current carriage count with m_oldCarriageCount -- confirm in the .cpp.
        virtual bool CanUpdateRailSlabFEModel() const;
        virtual bool CanUpdateBraseFEModel() const;
        virtual bool CanUpdateRailSlabConnection() const;
        virtual bool CanUpdateBraseConnection() const;
        //virtual void UpdateRailSlabModel();
        //virtual void UpdateBraseModel();
        //std::vector<NXOpen::Body*> GetGeoModelOccs(NXOpen::CAE::FemPart *pFemPart,
        //    const std::string &bodyPrtName, const std::string &bodyName);
        //void UpdateRailSlabConnection(NXOpen::CAE::FemPart *pFemPart);
        //void UpdateBaseSlabConnection(NXOpen::CAE::FemPart *pFemPart);
    private:
        int GetCarriageCount() const;
        //void SetFeGeometryData( NXOpen::CAE::FemPart * pFemPart, const std::vector<NXOpen::Body*> &bodyOccs, bool syncLines );
    private:
        int m_oldCarriageCount;  // carriage count from the previous update cycle
    };
}
#endif //VSAR_TRAIN_H_INCLUDED
<file_sep>/src/VsarInit/include/Vsar_Init_Names.hxx
#ifndef VSAR_INIT_NAMES_H_INCLUDED
#define VSAR_INIT_NAMES_H_INCLUDED
//------------------------------------------------------------------------------
//These includes are needed for the following template code
//------------------------------------------------------------------------------
namespace Vsar
{
const char * const ROOT_PART_NAME = "rail-transit.prt";
const char * const RAIL_SLAB_FEM_BASE_NAME = "RailSlab";
const char * const PROJECT_TYPE_NAME_BRIDGE = "BRIDGE";
const char * const TEMPLATE_BASE_NAME_BRIDGE = "RailSlabBridge";
const char * const TEMPLATE_BRACE_BASE_NAME_BRIDGE = "Bridge";
const char * const PROJECT_TYPE_NAME_SELMI_INFINITE = "SELMI-INFINITE";
const char * const TEMPLATE_BASE_NAME_SELMI_INFINITE = "RailSlabBase";
const char * const TEMPLATE_BRACE_BASE_NAME_SELMI_INFINITE = "Base";
const char * const PROJECT_TYPE_NAME_TUNNEL = "TUNNEL";
const char * const TEMPLATE_BASE_NAME_TUNNEL = "RailSlabTunnel";
const char * const TEMPLATE_BRACE_BASE_NAME_TUNNEL = "Tunnel";
const char * const TEMPLATE_FOLDER_NAME = "template";
const char * const TEMPLATE_COMMON_FOLDER_NAME = "common";
const char * const ATTRIBUTE_PROJECT_NAME = "VSAR_PROJECT_NAME";
const char * const ATTRIBUTE_PROJECT_TYPE = "VSAR_PROJECT_TYPE";
const char * const ATTRIBUTE_PROJECT_STATUS = "VSAR_PROJECT_STATUS";
const char * const ATTRIBUTE_PROJECT_STATUS_DEFINED = "PROJECT_DEFINED";
const char * const ATTRIBUTE_PROJECT_STATUS_RESPONSE_SOLVED = "PROJECT_RESPONSE_SOLVED";
const char * const ATTRIBUTE_PROJECT_STATUS_RESPONSE_NOISE_SOLVED = "PROJECT_RESPONSE_NOISE_SOLVED";
const char * const ATTRIBUTE_PROJECT_STATUS_NOISE_SOLVED = "PROJECT_NOISE_SOLVED";
const char * const MENU_ITEM_NAME_NEW_PROJECT = "VSAR_NEW_PROJECT";
const char * const MENU_ITEM_NAME_SET_TRAIN = "VSAR_SET_TRAIN";
const char * const MENU_ITEM_NAME_SET_RAIL = "VSAR_SET_RAIL";
const char * const MENU_ITEM_NAME_SET_SLAB = "VSAR_SET_SLAB";
const char * const MENU_ITEM_NAME_SET_BRACE = "VSAR_SET_BRACE";
const char * const MENU_ITEM_NAME_SET_BRIDGE = "VSAR_SET_BRIDGE";
const char * const MENU_ITEM_NAME_SET_BASE = "VSAR_SET_BASE";
const char * const MENU_ITEM_NAME_SET_TUNNEL = "VSAR_SET_TUNNEL";
const char * const MENU_ITEM_NAME_EXECUTE_SOLVE = "VSAR_EXECUTE_SOLVE";
const char * const MENU_ITEM_NAME_SOLVE_RESPONSE = "VSAR_SOLVE_RESPONSE";
const char * const MENU_ITEM_NAME_SOLVE_NOISE = "VSAR_SOLVE_NOISE";
const char * const MENU_ITEM_NAME_LOAD_RESULT = "VSAR_LOAD_RESULT";
//////////////////////////////////////////////////////////////////////////
// Result Name
#if 0
const char * const RESPONSE_RESULT_FILE_SUFFIX_NAME = "_response";
#endif
const char * const VSDANE_SOLUTION_NAME = "Vsdane109";
const char * const NOISE_INTERMEDIATE_RESULT_FILE_PATTERN_NAME = "%1%_noise_intermediate.afu";
const char * const NOISE_RESULT_FILE_PATTERN_NAME = "%1%_noise.afu";
const char * const RESPONSE_OP2_RESULT_FILE_PATTERN_NAME = "%1%_s-%2%.op2";
const char * const RESPONSE_AFU_RESULT_FILE_PATTERN_NAME = "%1%_s-%2%.afu";
}
#endif //VSAR_INIT_NAMES_H_INCLUDED
| bad4f265ee63b699796fb120a96528406f2f72f7 | [
"C",
"C++"
] | 54 | C++ | unidevop/nxvsar | 022f02524e6506f5ae5c3d49d69b717d7e83d205 | 3fd6e049f26fe3e183b0ea3c3b264110901507be |
refs/heads/master | <file_sep><?php
// AJAX endpoint used by the product forms: executes the SQL the client sent
// and echoes the first matching row as "id==name==value==1", or the literal
// string "null" when nothing matches.
//
// SECURITY(review): the full SQL statement arrives from the browser in the
// 'cadenasql' parameter and is executed verbatim -- any client can run
// arbitrary SQL.  Rework this endpoint to accept only a product id and
// build the query server-side.
require_once dirname(__FILE__) . '/../Clases/ConectorBD.php';
require_once dirname(__FILE__) . '/../Clases/Compras.php';
// Promote every request parameter to a local variable (e.g. $cadenasql).
foreach ($_POST as $Variable => $Valor)
    ${$Variable} = $Valor;
foreach ($_GET as $Variable => $Valor)
    ${$Variable} = $Valor;
$datos = ConectorBD::ejecutarQuery($cadenasql, null);
$lista = "";
if (count($datos) > 0) {
    // NOTE(review): these keys match the inventario table; when the caller
    // queries compras instead (formulariocompra.php does), 'idinventario'
    // and 'valor' do not exist in the row -- confirm intended usage.
    $lista .= "{$datos[0]['idinventario']}=={$datos[0]['nombre']}=={$datos[0]['valor']}==1";
} else {
    $lista .="null";
}
echo $lista;
?><file_sep><?php
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';

// Report page: the 10 best-selling products with units sold, revenue and
// profit.  POST 'nombre' optionally filters by product code or name; GET
// 'export' (excel|word|pdf) downloads the table instead of rendering it.

$filtro="";
$ganancia="";
if (isset($_POST['nombre'])&&$_POST['nombre']!=NULL){
    $nombresmenu=$_POST['nombre'];
    // FIX: the filter used to be appended after GROUP BY (invalid SQL) and
    // referenced idinventario, a column that does not exist in this join.
    // It is now an extra WHERE condition on the purchase code/name.
    // SECURITY(review): the value is interpolated unescaped -- SQL
    // injection risk until ConectorBD supports parameterized queries.
    $filtro=" and concat(idcompra,nombre) like'%$nombresmenu%'";
}
// FIX: "order by cantidad" was ambiguous (both joined tables have a
// 'cantidad' column); the aggregate now carries an alias and is ordered on.
$cadenasql="SELECT idcompras,nombre,descripcion,valorventauni, SUM(ventasdetalle.cantidad) as totalvendido,valorcomprauni from ventasdetalle,compras WHERE idcompras=idcompra $filtro GROUP by idcompras order by totalvendido desc limit 10";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$resultado="";
$contador=1;        // visible row number
$contadortotal=0;   // FIX: revenue total previously started at 1, overstating it
if(count($datos)>0){
    for ($i = 0; $i < count($datos); $i++) {
        $lista.="<tr>";
        $lista.="<td>{$contador}</td>";
        $lista.="<td>{$datos[$i][1]}</td>";
        $lista.="<td>{$datos[$i][2]}</td>";
        $lista.="<td>{$datos[$i][4]}</td>";
        // revenue = unit sale price * units sold
        $subtotalventas=$datos[$i][3]*$datos[$i][4];
        $lista.="<td> ".number_format($subtotalventas)."</td>";
        // profit = revenue - purchase cost of the same units
        $subtotalcompras=$datos[$i][5]*$datos[$i][4];
        $ganancia=$subtotalventas-$subtotalcompras;
        $lista.="<td> ".number_format($ganancia)."</td>";
        $lista.="</tr>";
        $contador=$contador+1;
        $contadortotal+=$subtotalventas;
    }
    $resultado.="<h2 class='text-center'> Total $ ". number_format($contadortotal)."</h2 > ";
} else {
    $lista.="<tr><td style='color:red;'>No se encuentra registrado en la base de datos<td><tr>";
}
// Export branches: nothing has been echoed yet, so the download headers
// below can still be sent safely.
if(isset($_GET['export'])) {
    if($_GET['export']=='excel'){
        $filename = "reporte.xls";
        header("Content-Type: application/vnd.ms-excel");
        header("Content-Disposition: attachment; filename=".$filename);
    }
}
if(isset($_GET['export'])) {
    if($_GET['export']=='word'){
        $filename = "reporte.doc";
        header("Content-Type: application/vnd.ms-word");
        header("Content-Disposition: attachment; filename=".$filename);
    }
}
if(isset($_GET['export'])) {
    if($_GET['export']=='pdf'){
        // PDF export renders the same table through mPDF and exits so the
        // HTML below is not emitted.
        require_once dirname(__FILE__) . '/../presentacion/lib/mpdf-master/mpdf.php';
        $html = ' <H2 >REPORTE DE PRODUCTOS MAS VENDIDOS </H2>
       <table class="table table-responsive table-hover " style="background: white;">
            <tr class="table-dark successx"><th>NUMERO</th><th>NOMBRE</th><th>DESCRIPCION</th><th>CANTIDAD VENDIDA</th> <th>SUBTOTAL</th> <th>GANANCIA</th>
           ';
        $html.=$lista;
        $html.='</table>';
        $mpdf=new mPDF('c');
        $mpdf->WriteHTML($html);
        $mpdf->Output();
        exit();
    }
}
?>
<center>
<H2 >REPORTE DE PRODUCTOS MAS VENDIDOS </H2>
<table class="table table-responsive table-hover " style="background: white;">
<tr class="table-dark successx"><th>NUMERO</th><th>NOMBRE</th><th>DESCRIPCION</th><th>CANTIDAD VENDIDA</th> <th>SUBTOTAL</th> <th>GANANCIA</th>
</tr>
<?=$lista?>
</table>
<?=$resultado?>
</div>
</center> <file_sep><?php
/*
 * Sales listing page: shows every sale (optionally restricted to a POST
 * 'desde'..'hasta' date range) with its computed total and a link to the
 * sale's detail view.
 */
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro="";
if (isset($_POST['desde'])&&$_POST['desde']!=NULL&&isset($_POST['hasta'])&&$_POST['hasta']!=NULL){
    $desde=$_POST['desde'];
    $hasta=$_POST['hasta'];
    // SECURITY(review): the dates are interpolated into the SQL unescaped
    // -- injection risk until ConectorBD supports parameterized queries.
    $filtro=" where fechasistema between '$desde' and '$hasta'";
}
$cadenasql="select*from ventas $filtro order by fechasistema desc";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$contador=1;
if(count($datos)>0){
    for ($i = 0; $i < count($datos); $i++) {
        // PERF(review): one aggregate query per sale (N+1); could be folded
        // into the outer query with a JOIN + GROUP BY.
        $cadenasql1="select sum(ventasdetalle.cantidad*valorventauni)as total from ventasdetalle,compras where idcompras=idcompra and idventa='{$datos[$i][0]}' ";
        $datos1= ConectorBD::ejecutarQuery($cadenasql1, NULL);
        $lista.="<tr>";
        $lista.="<td>{$contador}</td>";
        $lista.="<td>{$datos[$i][1]}</td>";
        $lista.="<td>$ ". number_format($datos1[0][0])."</td>";
        $lista.='<th><a href="principal.php?CONTENIDO=admon/ventasdetalle.php&accion=Detalle&idventa='.$datos[$i][0].'" ><img src="presentacion/imagenes/detalles.png" title="DETALLES VENTA" /></a>';
        $lista.="</tr>";
        $contador=$contador+1;
    }
} else {
    $lista.="<tr><td style='color:red;'>No se encuentra registrado en la base de datos<td><tr>";
}
?>
<div class="container">
<div class="offset-8 col-md-6 "style="z-index: 100; margin:5% 50%; position: absolute;background: #236780;">
<form method="post" class="">
<table class="table-responsive-lg table table-dark " >
<tr>
<td>DESDE<td><input class="form-control" type="date" name="desde" ></td>
<td>HASTA<td><input class="form-control" type="date" name="hasta" ></td>
<td><input class=" btn btn-primary"type="submit" value="BUSCAR"></td>
</tr>
</table>
</form>
</div>
<br><br><br>
<H2 >VENTAS REALIZADAS </H2>
<table class="table table-responsive table-hover" style="background: white;">
<tr class="table-dark successx"><th>NUM DE VENTA</th><th>FECHA DE REGISTRO</th><th>VALOR TOTAL VENTA</th>
</tr>
<?=$lista?>
</table>
</div>
<script>
    // Confirm-and-delete helper for a purchase row.
    // NOTE(review): no element on this page appears to call eliminar(); it
    // looks carried over from the purchases page -- confirm before removing.
    function eliminar(idcompra){
        if(confirm("Desea eliminar este registro"))
            location="principal.php?CONTENIDO=admon/actualizarcompra.php&accion=ELIMINAR&idcompra="+idcompra;
    }
    // F1 keyboard shortcut: jump to the "add purchase" form.
    $(document).bind('keydown', 'f1', function(){
       location="principal.php?CONTENIDO=admon/formulariocompra.php&accion=Adicionar"
    });
</script>
<file_sep><?php
/*
 * Persist purchase (compras) create/update/delete actions coming from the
 * purchase form, mirror every change into the inventario table, and
 * redirect back to the purchase list.
 *
 * SECURITY(review): all request values are interpolated into the SQL
 * unescaped -- this endpoint is injectable; parameterize once ConectorBD
 * supports prepared statements.
 */
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
require_once dirname(__FILE__).'/../Clases/Compras.php';
// Promote every request parameter to a local variable
// ($accion, $idcompra, $idcompraA, $nombre, ...).
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
foreach ($_GET as $Variable => $Valor) ${$Variable}=$Valor;
switch ($accion){
    case 'ADICIONAR':
        $cadena="insert into compras values('$idcompra','$nombre','$descripcion',$cantidad,$stockminimo,$valorcomprauni,$valorventauni);";
        ConectorBD::ejecutarQuery($cadena, null);
        $cadena="insert into inventario values('$idcompra','$nombre','$descripcion',$cantidad,$stockminimo,$valorventauni);";
        ConectorBD::ejecutarQuery($cadena, null);
        break;
    case 'MODIFICAR':
        // $idcompraA carries the original key so the row can be located
        // even when the barcode itself was edited.
        $cadena="update compras set idcompra='$idcompra',nombre='$nombre',descripcion='$descripcion',cantidad=$cantidad,stockminimo=$stockminimo,valorcomprauni=$valorcomprauni,valorventauni=$valorventauni where idcompra='$idcompraA'";
        ConectorBD::ejecutarQuery($cadena, null);
        $cadena="update inventario set idinventario='$idcompra',nombre='$nombre',descripcion='$descripcion',cantidad=$cantidad,stockminimo=$stockminimo,valor=$valorventauni where idinventario='$idcompraA'";
        // FIX: removed a leftover print_r($cadena) debug call; it emitted
        // output before the Location header below, breaking the redirect.
        ConectorBD::ejecutarQuery($cadena, null);
        break;
    case 'ELIMINAR':
        $cadena="delete from compras where idcompra='$idcompra'";
        ConectorBD::ejecutarQuery($cadena, null);
        $cadena="delete from inventario where idinventario='$idcompra'";
        ConectorBD::ejecutarQuery($cadena, null);
        break;
}
header("location: principal.php?CONTENIDO=admon/compras.php");
?>
<file_sep><!DOCTYPE HTML>
<!--
-->
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>CODSAMI</title>
<link rel="icon" type="image/png" href="presentacion/css/index/images/iconbarras.PNG"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="Free HTML5 Website Template by FreeHTML5.co" />
<meta name="keywords" content="free website templates, free html5, free template, free bootstrap, free website template, html5, css3, mobile first, responsive" />
<meta name="author" content="FreeHTML5.co" />
<link rel="stylesheet" href="presentacion/css/menu/css/animate.css">
<link rel="stylesheet" href="presentacion/css/menu/css/icomoon.css">
<link rel="stylesheet" href="presentacion/css/menu/css/themify-icons.css">
<link rel="stylesheet" href="presentacion/css/menu/css/bootstrap.css">
<link rel="stylesheet" href="presentacion/css/menu/css/magnific-popup.css">
<link rel="stylesheet" href="presentacion/css/menu/css/owl.carousel.min.css">
<link rel="stylesheet" href="presentacion/css/menu/css/owl.theme.default.min.css">
<link rel="stylesheet" href="presentacion/css/menu/css/flexslider.css">
<link rel="stylesheet" href="presentacion/css/menu/css/style.css">
<link href="presentacion/css/principal.css" rel="stylesheet" type="text/css"/>
<script src="presentacion/css/menu/js/modernizr-2.6.2.min.js"></script>
<script src="presentacion/lib/jquery-3.3.1.min.js" type="text/javascript"></script>
<script src="presentacion/lib/jquery.hotkeys.js" type="text/javascript"></script>
<style>
body{
background:#C9E8F3;
}
</style>
</head>
<body >
<!-- FIX: removed an <img> with an empty src; many browsers treat src=""
     as the current page URL and issue a spurious duplicate request -->
<div class="gtco-loader"></div>
<div id="page">
<nav class="gtco-nav" role="navigation" style="background: #236780;font-weight: bold; font-family: arial;">
<div class="container">
<div class="row">
<div class="col-sm-2 col-xs-12">
<a href="principal.php?CONTENIDO=inicio.php"><img src="presentacion/css/index/images/iconbarras.PNG" width="80" height="50" style="position: fixed;border-radius:20%;" /></a>
<div id="gtco-logo"style="margin: 0% 50%;"> <a href="principal.php?CONTENIDO=inicio.php"> COD<em>SAMI</em></a></div>
<br>
</div>
<div class="col-xs-10 text-right menu-1 ">
<ul>
<li ><a href="principal.php?CONTENIDO=inicio.php" >INICIO</a></li>
<li><a href="principal.php?CONTENIDO=admon/compras.php" >COMPRAS</a></li>
<li><a href="principal.php?CONTENIDO=admon/ventas.php" >VENTAS</a></li>
<li><a href="principal.php?CONTENIDO=admon/inventario.php" >INVENTARIO</a></li>
<li><a href="principal.php?CONTENIDO=admon/reportes.php" >REPORTES</a></li>
<li><a href="principal.php?CONTENIDO=admon/indicadores.php" >INDICADORES</a></li>
<li class="btn-cta"><a href="index.php" ><span>SALIR</span></a></li>
<li><a href="principal.php?CONTENIDO=admon/cambioclaveadmin.php" > <img src="presentacion/imagenes/configure.png" width="30" height="30"/></a></li>
</ul>
</div>
</div>
</div>
</nav>
<div class="contenido">
<?php
// SECURITY FIX: including the raw CONTENIDO parameter allowed arbitrary
// local-file inclusion.  Only the "page.php" / "folder/page.php" shapes
// that the menu links use are accepted now.
$contenido = isset($_GET['CONTENIDO']) ? $_GET['CONTENIDO'] : 'inicio.php';
if (preg_match('#^[A-Za-z0-9_-]+(/[A-Za-z0-9_-]+)*\.php$#', $contenido)) {
    include $contenido;
}
?>
</div>
<footer id="gtco-footer" role="contentinfo">
<div class="container">
<div class="row copyright">
<div class="col-md-12">
<p class="pull-left">
<small class="block">© 2018 Rights Reserved.</small>
<small class="block"> <a href="https://www.facebook.com/johanalexis.caratarpabon" target="_blank"><NAME></a> ___ <a href="http://www.sena.edu.co/es-co/Paginas/default.aspx" target="_blank"> Sena Nariño2018</a></small>
</p>
<!-- FIX: a <ul> may not sit inside <p>; browsers auto-close the <p>,
     leaving a stray </p>.  A <div> keeps the pull-right styling. -->
<div class="pull-right">
    <ul class="gtco-social-icons pull-right">
        <li><a href="#"><i class="icon-twitter"></i></a></li>
        <li><a href="#"><i class="icon-facebook"></i></a></li>
        <li><a href="#"><i class="icon-linkedin"></i></a></li>
        <li><a href="#"><i class="icon-dribbble"></i></a></li>
    </ul>
</div>
</div>
</div>
</div>
</footer>
</div>
<div class="gototop js-top">
<a href="#" class="js-gotop"><i class="icon-arrow-up"></i></a>
</div>
<script src="presentacion/css/menu/js/jquery.min.js"></script>
<script src="presentacion/css/menu/js/jquery.easing.1.3.js"></script>
<script src="presentacion/css/menu/js/bootstrap.min.js"></script>
<script src="presentacion/css/menu/js/jquery.waypoints.min.js"></script>
<script src="presentacion/css/menu/js/jquery.stellar.min.js"></script>
<script src="presentacion/css/menu/js/jquery.magnific-popup.min.js"></script>
<script src="presentacion/css/menu/js/magnific-popup-options.js"></script>
<script src="presentacion/css/menu/js/main.js"></script>
</body>
</html>
<file_sep>; this is an INI file
; [bd] holds the MySQL connection settings read by Clases/ConectorBD.php
[bd]
controlador = mysql
servidor = localhost
puerto = 3306
bd = codsami
usuario = root
clave =
<file_sep><?php
/*
 * Generate a fresh, unused barcode for a new purchase and redirect back to
 * the purchase form with the code in the 'codigos' query parameter.
 */
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
// Promote request parameters to local variables ($accion, ...).
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
foreach ($_GET as $Variable => $Valor) ${$Variable}=$Valor;
// Seed above the current maximum code so candidates tend to be free;
// max() is NULL on an empty table, hence the numeric cast (0 then).
$cadenasql="select max(idcompra)+12345 from compras ";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$base=(float) $datos[0][0];
// FIX: the code was previously handed out on the first roll without any
// uniqueness check, so an existing barcode could be reissued.  Retry until
// the candidate is unused (collisions are rare, so this rarely loops).
do {
    $codigogenerado=mt_rand(1,999999999)*999+$base;
    $existe=ConectorBD::ejecutarQuery("select idcompra from compras where idcompra='$codigogenerado'", NULL);
} while (count($existe)>0);
header("location: principal.php?CONTENIDO=admon/formulariocompra.php&codigos=$codigogenerado&accion=".urlencode($accion));
?>
<file_sep><?php
header('HTTP/1.1 200 OK');
header ('Content-Type: application/json;charset=UTF-8');
require_once '../Clases/ConectorBD.php';
// API endpoint: register a new purchase and mirror it into the inventario
// table.
//
// The numeric fields are now cast so they cannot smuggle SQL into the
// unquoted positions below.  SECURITY(review): $nombre and $descripcion
// are still interpolated verbatim -- escape/parameterize them once
// ConectorBD supports prepared statements.
$codigo=(float) $_POST['codigo'];
$nombre=$_POST['nombre'];
$descripcion=$_POST['descripcion'];
$cantidad=(int) $_POST['cantidad'];
$stockminimo=(int) $_POST['stockminimo'];
$valorcomprauni=(float) $_POST['valorcomprauni'];
$valorventauni=(float) $_POST['valorventauni'];
$cadenaSQL="insert into compras values($codigo,'$nombre','$descripcion',$cantidad,$stockminimo,$valorcomprauni,$valorventauni);";
ConectorBD::ejecutarQuery($cadenaSQL,'codsami');
$cadenaSQL1="insert into inventario values($codigo,'$nombre','$descripcion',$cantidad,$stockminimo,$valorventauni);";
ConectorBD::ejecutarQuery($cadenaSQL1,'codsami');
?><file_sep><?php
header('HTTP/1.1 200 OK');
header ('Content-Type: application/json;charset=UTF-8');
require_once '../Clases/ConectorBD.php';
// API endpoint: list every inventory item whose available quantity has
// fallen to its minimum stock level or below.
//
// FIX: the previous version hand-concatenated pseudo-JSON with single
// quotes (invalid JSON) and, when no item was low on stock, substr()
// stripped the opening '[' instead of a trailing comma, producing
// unparseable output.  json_encode() now always emits a valid array.
// NOTE(review): if a consumer relied on the old single-quoted format,
// update its parser accordingly.
$cadenaSQL="SELECT idinventario,nombre,descripcion,cantidad,stockminimo,valor FROM inventario ";
$resultado=ConectorBD::ejecutarQuery($cadenaSQL,'codsami');
$items = array();
for ($i = 0; $i < count($resultado); $i++) {
    // Alert only when quantity has reached the minimum stock or below.
    if ($resultado[$i]['stockminimo'] >= $resultado[$i]['cantidad']) {
        $items[] = array(
            'idinventario' => $resultado[$i]['idinventario'],
            'nombre'       => $resultado[$i]['nombre'],
            'descripcion'  => $resultado[$i]['descripcion'],
            'cantidad'     => $resultado[$i]['cantidad'],
            'stockminimo'  => $resultado[$i]['stockminimo'],
            'valor'        => $resultado[$i]['valor'],
        );
    }
}
echo json_encode($items);
?><file_sep><?php
// Inventory listing page: renders every inventario row (optionally filtered
// by POST 'nombre', matched against code or name) into $lista, echoed by
// the HTML below.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro="";
if (isset($_POST['nombre'])&&$_POST['nombre']!=NULL){
    $nombresmenu=$_POST['nombre'];
    // SECURITY(review): the search text is interpolated unescaped -- SQL
    // injection risk until ConectorBD supports parameterized queries.
    $filtro=" where concat(idinventario,nombre) like'%$nombresmenu%'";
}
$cadenasql="select*from inventario $filtro";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$contador=1;    // visible row number
if(count($datos)>0){
    for ($i = 0; $i < count($datos); $i++) {
        $lista.="<tr>";
        $lista.="<td>{$contador}</td>";
//        $lista.='<td><img src="presentacion/lib/barcode.php?text='.$datos[$i][0].'&size=20&codetype=code39&print=true "></td>';
        $lista.="<td>{$datos[$i][1]}</td>";
        $lista.="<td>{$datos[$i][2]}</td>";
        $lista.="<td>{$datos[$i][3]}</td>";
        $lista.="</tr>";
        $contador=$contador+1;
    }
} else {
    $lista.="<tr><td style='color:red;'>No se encuentra registrado en la base de datos<td><tr>";
}
?>
<div class="offset-8 col-md-4 "style="z-index: 100; margin:5% 65%; position:absolute;background: #236780;">
<form method="post" class="">
<table class="table-responsive-lg table table-dark " >
<tr>
<th> <img src="presentacion/imagenes/buscarpequeño.png"></span></th><td><input class="form-control" type="text" name="nombre" placeholder="Nombre o codigo" ></td>
<td><input class="btn-primary"type="submit" value="BUSCAR"></td>
</tr>
</table>
</form>
</div>
<br><br><br><br>
<div class="container-fluid">
<H2 >INVENTARIO </H2>
<table class="table table-responsive table-hover" style="background: white;">
<tr class="table-dark successx"><th>NUMERO</th><th>NOMBRE</th><th>DESCRIPCION</th><th>CANTIDAD</th>
</tr>
<?=$lista?>
</table>
</div>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Purchase form page.  With accion=Modificar the form is pre-filled from
// the existing purchase and its barcode image is rendered; otherwise an
// empty form is shown with a "GENERAR CODIGO" link (and the barcode for a
// code the generator passed back in $_GET['codigos']).
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
require_once dirname(__FILE__).'/../Clases/Compras.php';
// Promote every request parameter to a local variable ($accion, $idcompra,
// $codigos, ...).
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
foreach ($_GET as $Variable => $Valor) ${$Variable}=$Valor;
$codigo="";         // barcode <img> markup for an existing purchase
$generarcodigo="";  // "generate code" link markup (add mode only)
if ($accion=='Modificar'){$compras=new Compras('idcompra', $idcompra);
    $codigoss="";
    $codigos="";
    $codigo.='<img src="presentacion/lib/barcode.php?text='.$compras->getIdcompra().'&size=80&codetype=code39&print=true ">';
}
else {$compras=new Compras(null, null);
    if (isset($codigos)){$codigos=$codigos;}
    else $codigos=null;
    // NOTE(review): $codigoss is only initialized in the Modificar branch;
    // the .= below appends to an undefined variable in add mode (PHP notice).
    $codigoss.='<img src="presentacion/lib/barcode.php?text='.$codigos.'&size=80&codetype=code39&print=true ">';
    $generarcodigo="<a href='principal.php?CONTENIDO=admon/generarcodigo.php&accion=$accion'>GENERAR CODIGO</a>";
}
?>
<br><br><br>
<div class="container-fluid">
<h2><?= strtoupper($accion)?> COMPRA</h2>
<div class="offset-2 col-md-8">
<form name="formulario" method="POST" action="principal.php?CONTENIDO=admon/actualizarcompra.php">
<table class="table">
<tr><th id="verificar"></th><tr>
<tr><th>CODIGO DE BARRAS</th><td><input class="form-control" type="text" id="idcompra" onchange="verificarcodigo()" name="idcompra" value="<?=$codigos?><?=$compras->getIdcompra()?>" required ></td></tr>
<tr><th>NOMBRE</th><td><input class="form-control" type="text" name="nombre" value="<?=$compras->getNombre()?>" required></td></tr>
<tr><th>DESCRIPCION</th><td><input class="form-control" type="text" name="descripcion" value="<?=$compras->getDescripcion()?>" required></td></tr>
<tr><th>CANTIDAD</th><td><input class="form-control" type="number" name="cantidad" value="<?=$compras->getCantidad()?>" required></td></tr>
<tr><th>CANTIDAD MINIMA DE PRODUCTO</th><td><input class="form-control" type="number" name="stockminimo" value="<?=$compras->getstockminimo()?>" required></td></tr>
<tr><th>VALOR UNITARIO COMPRA</th><td><input class="form-control" type="number" name="valorcomprauni" value="<?=$compras->getValorcomprauni()?>" required></td></tr>
<tr><th>VALOR UNITARIO VENTA</th><td><input class="form-control" type="number" name="valorventauni" value="<?=$compras->getValorventauni()?>" required></td></tr>
</table>
<center><input class="btn btn-primary" type="hidden" name="idcompraA" value="<?= $idcompra?>"></center>
<center><input class="btn btn-primary"type="submit" name="accion" accesskey="5" value="<?= strtoupper($accion)?>"></center>
</form>
</div>
<?=$codigo?><?=$generarcodigo?><br><br><?=$codigoss?>
</div>
<script>
    // Fired when the barcode field changes: asks the server whether the
    // code is already registered and shows the answer in #verificar.
    //
    // SECURITY(review): the raw SQL statement is built in the browser and
    // executed verbatim by admon/verificarproducto.php -- any client can
    // run arbitrary SQL.  The endpoint should accept just the id instead.
    function verificarcodigo(){
       var idcodigo=$('#idcompra').val();
       if(idcodigo!=""){
        var $cadenasql="select*From compras where idcompra='"+idcodigo+"'";
        $.ajax({
            url:'admon/verificarproducto.php',
            type:'post',
            data:{cadenasql:$cadenasql},
            success: function (data, textStatus, jqXHR) {
                 $("#verificar").html(data);
            }
        });
    }
}
</script><file_sep><?php
// JSON login endpoint (used by AJAX/mobile clients): answers with the user
// rows matching the posted credentials, or the literal string "false".
header('HTTP/1.1 200 OK');
header ('Content-Type: application/json;charset=UTF-8');
require_once '../Clases/ConectorBD.php';
// SECURITY NOTE(review): credentials are interpolated unescaped into SQL
// (injection), passwords are stored and compared in plain text, and the
// response echoes the password back to the client. All three need hardening.
$usuario=$_POST['usuario'];
$clave=$_POST['clave'];
$cadenaSQL="select idusuario,clave from usuario where idusuario='$usuario' and clave='$clave'";
$resultado=ConectorBD::ejecutarQuery($cadenaSQL,'codsami');
if(count($resultado)>0) {
// Re-pack the rows so only the two named columns are serialized.
$json=array();
for ($i = 0; $i < count($resultado); $i++) {
$json[$i]['idusuario'] = $resultado[$i]['idusuario'];
$json[$i]['clave'] = $resultado[$i]['clave'];
} echo json_encode($json);
} else echo 'false';
?>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
* Description of Compras
*
* @author johan
*/
/**
 * Domain object for one row of the `compras` (purchased products) table.
 *
 * Construction is overloaded on the first argument:
 *  - associative array: the fields are copied straight from it (no DB hit);
 *  - column name (string): the row where $campo = $valor is fetched;
 *  - null: an empty instance (fields stay unset).
 *
 * SECURITY NOTE(review): in the lookup path $campo/$valor are interpolated
 * unescaped into SQL — callers must pass trusted values until ConectorBD
 * offers a parameterized-query API.
 */
class Compras {
    private $idcompra;       // barcode / primary key
    private $nombre;
    private $descripcion;
    private $cantidad;       // units on hand
    private $stockminimo;    // restock threshold
    private $valorcomprauni; // unit purchase price
    private $valorventauni;  // unit sale price

    function __construct($campo,$valor) {
        if ($campo!=null){
            if (is_array($campo)) $this->cargarvector($campo);
            else{
                $cadenaSQL="select*from compras where $campo='$valor' ";
                // BUG FIX: removed a leftover debug print_r() that leaked the
                // SQL statement into every page that instantiated this class.
                $resultado= ConectorBD::ejecutarQuery($cadenaSQL,null);
                if (count($resultado)>0) $this->cargarvector($resultado[0]);
            }
        }
    }

    // Copy one associative DB row (or caller-built array) into the fields.
    private function cargarvector($vector){
        $this->idcompra=$vector['idcompra'];
        $this->nombre=$vector['nombre'];
        $this->descripcion=$vector['descripcion'];
        $this->cantidad=$vector['cantidad'];
        $this->stockminimo=$vector['stockminimo'];
        $this->valorcomprauni=$vector['valorcomprauni'];
        $this->valorventauni=$vector['valorventauni'];
    }

    function getIdcompra() {
        return $this->idcompra;
    }
    function getNombre() {
        return $this->nombre;
    }
    function getDescripcion() {
        return $this->descripcion;
    }
    function getCantidad() {
        return $this->cantidad;
    }
    function getstockminimo() {
        return $this->stockminimo;
    }
    function getValorcomprauni() {
        return $this->valorcomprauni;
    }
    function getValorventauni() {
        return $this->valorventauni;
    }
    function setIdcompra($idcompra) {
        $this->idcompra = $idcompra;
    }
    function setNombre($nombre) {
        $this->nombre = $nombre;
    }
    function setDescripcion($descripcion) {
        $this->descripcion = $descripcion;
    }
    function setCantidad($cantidad) {
        $this->cantidad = $cantidad;
    }
    function setstockminimo($stockminimo) {
        $this->stockminimo = $stockminimo;
    }
    function setValorcomprauni($valorcomprauni) {
        $this->valorcomprauni = $valorcomprauni;
    }
    function setValorventauni($valorventauni) {
        $this->valorventauni = $valorventauni;
    }
}
<file_sep><?php
// Top-10 best-selling products report (ventasdetalle joined with compras),
// optionally filtered by the POSTed "nombre" search term. Renders rows into
// $lista and the grand total into $resultado, then emits the page.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro="";
if (isset($_POST['nombre'])&&$_POST['nombre']!=NULL){
$nombresmenu=$_POST['nombre'];
// BUG FIX: the filter used to start with "where" and was appended AFTER
// "GROUP BY", producing invalid SQL whenever a search term was posted.
// It is now an AND-condition merged into the WHERE clause below.
// NOTE(review): the original referenced concat(idinventario,nombre), but the
// joined tables expose idcompras/idcompra — idcompras is used here; confirm.
// SECURITY NOTE(review): $nombresmenu is interpolated unescaped (injection).
$filtro=" and concat(idcompras,nombre) like'%$nombresmenu%'";
}
$cadenasql="SELECT idcompras,nombre,descripcion,valorventauni, SUM(ventasdetalle.cantidad)as cantidad,valorcomprauni from ventasdetalle,compras WHERE idcompras=idcompra $filtro GROUP by idcompras order by cantidad desc limit 10";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$resultado="";
$contador=1;      // visible row number
$contadortotal=0; // BUG FIX: started at 1, inflating the grand total by $1
if(count($datos)>0){
for ($i = 0; $i < count($datos); $i++) {
$lista.="<tr>";
$lista.="<td>{$contador}</td>";
$lista.='<td><img src="presentacion/lib/barcode.php?text='.$datos[$i][0].'&size=20&codetype=code39&print=true "></td>';
$lista.="<td>{$datos[$i][1]}</td>";
$lista.="<td>{$datos[$i][2]}</td>";
$lista.="<td>{$datos[$i][4]}</td>";
// profit = (sale price - purchase price) * quantity sold
$subtotalventas=$datos[$i][3]*$datos[$i][4];
$subtotalcompras=$datos[$i][5]*$datos[$i][4];
$ganancia=$subtotalventas-$subtotalcompras;
$lista.="<td> $".number_format($ganancia)."</td>";
$lista.="<td> $".number_format($subtotalventas)."</td>";
$lista.="</tr>";
$contador=$contador+1;
$contadortotal+=$subtotalventas;
}
$resultado.="<h2 class='text-center'> Total $ ". number_format($contadortotal)."</h2 > ";
} else {
$lista.="<tr><td style='color:red;'>No se encuentra registrado en la base de datos<td><tr>";
}
?>
<br><br><br>
<div class=" col-md-6 "style="z-index: 100; position: fixed; margin:-1% 0%; background: #236780;color: white;">
<ul class="text-info">
<li><a href="principal.php?CONTENIDO=admon/reportes.php" class="table-hover btn btn-primary " style="margin: 1% 0%;">Productos mas vendidos</a>-- <a href="principal.php?CONTENIDO=admon/gananciasemana.php" style="margin: 1% 0%;"class="table-hover btn btn-primary">Ganancia por semanana</a></li>
</ul>
</div>
<div class="container-fluid">
<br><br>
<H2 >REPORTE DE PRODUCTOS MAS VENDIDOS </H2>
<div class="text-right">
<a href="admon/imprimirreporte.php?export=excel" target="_blank"><img src="presentacion/imagenes/exel.png" title="Exportar a Excel" width="50" height="50"/></a>
<a href="admon/imprimirreporte.php?export=word" target="_blank"><img src="presentacion/imagenes/word.jpg" title="Exportar a Excel" width="50" height="50"/></a>
<a href="admon/imprimirreporte.php?export=pdf" target="_blank"><img src="presentacion/imagenes/pdf.png" title="Exportar a Pdf" width="50" height="50"/></a>
</div>
<table class="table table-responsive table-hover " style="background: white;">
<tr class="table-dark successx"><th>NUMERO</th><th>CODIGO</th><th>NOMBRE</th><th>DESCRIPCION</th><th>CANTIDAD VENDIDA</th> <th>GANANCIA</th> <th>SUBTOTAL</th>
</tr>
<?=$lista?>
</table>
<?=$resultado?>
</div>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Login page: renders the sign-in form and any error message passed back
// from validar.php via the "mensaje" query parameter.
$mensaje="";
if (isset($_GET['mensaje'])) {$mensaje=$_GET['mensaje'];}
?>
<!DOCTYPE html>
<html lang="en">
<head>
<title>CODSAMI</title>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="icon" type="image/png" href="presentacion/css/index/images/iconbarras.PNG"/>
<link rel="stylesheet" type="text/css" href="presentacion/css/index/vendor/bootstrap/css/bootstrap.min.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/fonts/font-awesome-4.7.0/css/font-awesome.min.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/vendor/animate/animate.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/vendor/css-hamburgers/hamburgers.min.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/vendor/select2/select2.min.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/css/util.css">
<link rel="stylesheet" type="text/css" href="presentacion/css/index/css/main.css">
</head>
<body>
<div class="limiter">
<div class="container-login100">
<div class="wrap-login100">
<div class="login100-pic js-tilt" data-tilt>
<H2>SISTEMA DE INFORMACION PARA MINIMARKET</H2>
</div>
<div class="login100-pic js-tilt" data-tilt>
<img src="presentacion/css/index/images/INDEX.jpg" alt="IMG">
</div>
<div class="login100-pic js-tilt" data-tilt>
<img src="presentacion/css/index/images/index2.jpg" alt="IMG">
</div>
<!-- Credentials are POSTed to validar.php, which redirects back here with
     ?mensaje=... on failure. -->
<form class="login100-form validate-form" name="formulario" method="POST" action="validar.php">
<span class="login100-form-title">
LOGIN
<h5 style="color: red;"><?=$mensaje?></h5>
</span>
<div class="wrap-input100 validate-input " data-validate = "usuario requerido ">
<input class="input100 form-control" type="text" name="usuario" placeholder="USUARIO">
<span class="focus-input100"></span>
<span class="symbol-input100">
<i class="fa fa-envelope" aria-hidden="true"></i>
</span>
</div>
<div class="wrap-input100 validate-input " data-validate = "ingrese contraseña">
<input class="input100 form-control" type="password" name="clave" placeholder="<PASSWORD>">
<span class="focus-input100"></span>
<span class="symbol-input100">
<i class="fa fa-lock" aria-hidden="true"></i>
</span>
</div>
<div class="container-login100-form-btn">
<input class="login100-form-btn" type="submit" value="INGRESAR">
</div>
</form>
</div>
</div>
</div>
<!-- Vendor scripts: jQuery, Bootstrap, select2 and the tilt hover effect. -->
<script src="presentacion/css/index/vendor/jquery/jquery-3.2.1.min.js"></script>
<script src="presentacion/css/index/vendor/bootstrap/js/popper.js"></script>
<script src="presentacion/css/index/vendor/bootstrap/js/bootstrap.min.js"></script>
<script src="presentacion/css/index/vendor/select2/select2.min.js"></script> <script src="presentacion/css/index/vendor/tilt/tilt.jquery.min.js"></script>
<script >
$('.js-tilt').tilt({
scale: 1.1
})
</script>
<script src="presentacion/css/index/js/main.js"></script>
</body>
</html>
<file_sep><?php
// Weekly profit report: one row per calendar day (the 7 most recent days
// with sales), optionally restricted to the POSTed desde/hasta date range.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro="";
if (isset($_POST['desde'])&&$_POST['desde']!=NULL&&isset($_POST['hasta'])&&$_POST['hasta']!=NULL){
// SECURITY NOTE(review): both dates are interpolated unescaped into SQL.
$desde=$_POST['desde'];
$hasta=$_POST['hasta'];
$filtro=" and fechasistema>='$desde' and fechasistema<='$hasta'";
}
$cadenasql="SELECT SUBSTRING(fechasistema, -19,10)as fechasistema,sum(ventasdetalle.cantidad), SUM(ventasdetalle.cantidad*compras.valorventauni),SUM(ventasdetalle.cantidad*compras.valorcomprauni)from ventas,ventasdetalle,compras where ventas.idventa=ventasdetalle.idventa and idcompra=idcompras $filtro group by DATE_FORMAT(fechasistema, '%Y-%m-%d') order by fechasistema DESC limit 7;";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$resultado="";
$contador=1;      // visible row number
$contadortotal=0; // BUG FIX: was initialised to "" (a non-numeric string),
                  // which PHP 8 rejects with a TypeError on the += below.
if(count($datos)>0){
for ($i = 0; $i < count($datos); $i++) {
$lista.="<tr>";
$lista.="<td>{$contador}</td>";
$lista.="<td>{$datos[$i][0]}</td>";
$lista.="<td>{$datos[$i][1]}</td>";
$lista.="<td>{$datos[$i][2]}</td>";
// daily profit = sales total - purchase-cost total
$ganancia=$datos[$i][2]-$datos[$i][3];
$lista.="<td>$ganancia</td>";
$lista.="</tr>";
$contador=$contador+1;
$contadortotal+=$ganancia;
}
$resultado.="<h2 class='text-center'> Total Ganancia $ ". number_format($contadortotal)."</h2 > ";
} else {
$lista.="<tr><td style='color:red;'>No se encuentra registrado en la base de datos<td><tr>";
}
?>
<div class=" col-md-6 "style="z-index: 100; margin:1.2% 50%; height: 66px; position: absolute;background: #236780;">
<form method="post" >
<table class="table-responsive-lg table table-dark " >
<tr>
<td>DESDE<td><input class="form-control" type="date" name="desde" ></td>
<td>HASTA<td><input class="form-control" type="date" name="hasta" ></td>
<td><input class=" btn btn-primary"type="submit" value="BUSCAR"></td>
</tr>
</table>
</form>
</div>
<br>
<div class=" col-md-6 "style="z-index: 100; position: fixed; margin:-1% 0%; background: #236780;color: white;">
<ul class="text-info">
<li><a href="principal.php?CONTENIDO=admon/reportes.php" class="table-hover btn btn-primary " style="margin: 1% 0%;">Productos mas vendidos</a>-- <a href="principal.php?CONTENIDO=admon/gananciasemana.php" style="margin: 1% 0%;"class="table-hover btn btn-primary">Ganancia por semanana</a></li>
</ul>
</div>
<div class="container-fluid">
<br><br>
<H2 >REPORTE DE GANANCIAS POR SEMANA </H2>
<div class="text-right">
<a href="admon/gananciareport.php?export=excel" target="_blank"><img src="presentacion/imagenes/exel.png" title="Exportar a Excel" width="50" height="50"/></a>
<a href="admon/gananciareport.php?export=word" target="_blank"><img src="presentacion/imagenes/word.jpg" title="Exportar a Excel" width="50" height="50"/></a>
<a href="admon/gananciareport.php?export=pdf" target="_blank"><img src="presentacion/imagenes/pdf.png" title="Exportar a Pdf" width="50" height="50"/></a>
</div>
<table class="table table-responsive table-hover " style="background: white;">
<tr class="table-dark successx"><th>DIA</th><th>FECHA</th><th>PRODUCTOS VENDIDOS</th> <th>TOTAL</th> <th>GANANCIA</th>
</tr>
<?=$lista?>
</table>
<?=$resultado?>
</div>
<file_sep><?php
/*
 * Credential check for the login form (index.php). On success the user is
 * sent to the main page; otherwise back to the login page with a message.
 */
require_once dirname(__FILE__) . '/Clases/ConectorBD.php';
// Copies every POSTed field into a like-named local ($usuario, $clave).
// SECURITY NOTE(review): register_globals-style extraction, unescaped SQL
// interpolation (injection) and plain-text password comparison all need
// hardening once ConectorBD supports parameterized queries.
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
$cadenasql="select*from usuario where idusuario='$usuario' and clave='$clave'";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
// BUG FIX: the original condition was `$usuario=$datos[0][0]&&...` — an
// ASSIGNMENT, not a comparison — and it indexed $datos[0] without checking
// that any row came back (undefined-offset error on a failed login).
if (count($datos)>0 && $usuario==$datos[0][0] && $clave==$datos[0][1]){
header("location:principal.php?CONTENIDO=inicio.php");
}else{
$mensaje="ERROR DE USUARIO Y/O CONTRASEÑA";
header("location: index.php?mensaje=".$mensaje);
}
?>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
// Look up one inventory item by the id POSTed from the sales screen and
// answer with "id==nombre==valor==1", or the literal string "null" when the
// item does not exist (the caller keys on that exact string).
// SECURITY NOTE(review): $idInventario is interpolated unescaped into SQL —
// injection risk; parameterize once ConectorBD supports it.
$idInventario = $_POST['idInventario'];
$cadenaSQL = "select * from inventario where idinventario = '$idInventario'";
$filas = ConectorBD::ejecutarQuery($cadenaSQL, NULL);
if (count($filas) > 0) {
    $fila = $filas[0];
    echo $fila['idinventario'] . '==' . $fila['nombre'] . '==' . $fila['valor'] . '==1';
} else {
    echo 'null';
}
?>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
require_once dirname(__FILE__). '/../Clases/ConectorBD.php';
// Change the administrator's username/password, then force a re-login.
// SECURITY NOTE(review): request values are copied into like-named locals
// (register_globals style) and interpolated unescaped into SQL (injection);
// the current password is also matched against ANY user row, not a specific
// account, and passwords travel in redirect URLs. All need hardening.
foreach ($_POST as $Variable=> $valor) ${$Variable}=$valor;
foreach ($_GET as $Variable=> $valor) ${$Variable}=$valor;
$cadena="select*from usuario where clave='$claveactual'";
$datos= ConectorBD::ejecutarQuery($cadena, null);
if (count($datos)>0){
if ($clavenueva==$confirmarclave) {
// Current password matched and the two new passwords agree: update.
$cadena="update usuario set idusuario='$usuario',clave='$clavenueva' where idusuario='{$datos[0][0]}'";
$datos= ConectorBD::ejecutarQuery($cadena, null);
} else {$mensaje="No concuerdan las contraseñas nuevas";
header("Location:principal.php?CONTENIDO=admon/cambioclaveadmin.php&mensaje=$mensaje&claveactual=$claveactual&clavenueva=$clavenueva&confirmarclave=$confirmarclave ") ;
exit; // BUG FIX: stop so the success script below is not emitted on error
}
}
else{
$mensaje="las contraseña actual es incorrecta";
header("Location:principal.php?CONTENIDO=admon/cambioclaveadmin.php&mensaje=$mensaje&claveactual=$claveactual&clavenueva=$clavenueva&confirmarclave=$confirmarclave ") ;
exit; // BUG FIX: same as above — terminate on the error path
}
?>
<script>
alert("Contraseña Modificada");
location="index.php?mensaje=Ingrese de nuevo al sistema";
</script>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Admin password-change form. Failed attempts come back via GET (mensaje
// plus the previously typed values) so the form can be re-filled.
require_once dirname(__FILE__). '/../Clases/ConectorBD.php';
if (isset($_GET['mensaje'])) $mensaje=$_GET['mensaje'];
else $mensaje='';
if (isset($_GET['claveactual'])) $claveactual=$_GET['claveactual'];
else $claveactual='';
if (isset($_GET['clavenueva'])) $clavenueva=$_GET['clavenueva'];
else $clavenueva='';
if (isset($_GET['confirmarclave'])) $confirmarclave=$_GET['confirmarclave'];
else $confirmarclave='';
// SECURITY NOTE(review): passwords round-tripped through the URL end up in
// browser history and server logs — consider a POST/session hand-off.
// First user row pre-fills the username field below.
$cadena="select*from usuario ";
$datos= ConectorBD::ejecutarQuery($cadena, null);
?>
<br><br>
<br><br>
<h2>CAMBIO DE CONTRASEÑA </h2>
<div class="container col-md-5"><br>
<form name="formulario" action="principal.php?CONTENIDO=admon/actualizarClave.php" method="post">
<table class="table table-hover table-responsive-lg table-content ">
<font color="red" face="arial"><?= $mensaje?>
<tr><th>Usuario</th><th><input class="form-control" type="text" name="usuario" placeholder="Nuevo usuario" required autofocus value="<?= $datos[0][0]?>"> </th></tr>
<tr><th>Contraseña Actual</th><th><input class="form-control" type="password" name="claveactual" placeholder="Ingrese contraseña actual" required autofocus value="<?= $claveactual?>"> </th></tr>
<tr><th>Contraseña nueva</th><th><input class="form-control"type="password" name="clavenueva" placeholder="Ingrese contraseña nueva" required value="<?= $clavenueva?>"> </th></tr>
<tr><th>Confirmar contraseña</th><th><input class="form-control" type="password" name="confirmarclave" placeholder="repita la contraseña actual" value="<?= $confirmarclave?>"required > </th></tr>
<tr><td></td><td> <input class=" btn btn-primary"type="submit" value="Confirmar"></td></tr>
</table>
</form>
</div>
</div><file_sep><html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>amCharts examples</title>
<link rel="stylesheet" href="style.css" type="text/css">
<script src="presentacion/lib/amchar/amcharts/amcharts.js" type="text/javascript"></script>
<script src="presentacion/lib/amchar/amcharts/serial.js" type="text/javascript"></script>
<script>
var chart;
var chartData = [
<?php
require_once dirname(__FILE__) . '/../Clases/ConectorBD.php';
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
$mensajess="";
if (isset($año)&&$año!=NULL) {
$year=$año;
}else{
$year = date("Y");
}
$lista = "";
for ($j = 1; $j <= 12; $j++) {
$cadenaSQL = "select sum(ventasdetalle.cantidad*valorventauni)as total from ventasdetalle,compras,ventas WHERE idcompras=idcompra and ventas.idventa=ventasdetalle.idventa and fechasistema BETWEEN '$year-$j-01' AND '$year-$j-31' ";
$datos = ConectorBD::ejecutarQuery($cadenaSQL, null);
$meses = array('', 'Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio', 'Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre');
$valormesess = 0;
if (count($datos) > 0) {
$lista .= "<tr><th>".$meses[$j]."</th>";
$lista .= "<th>$ {$datos[0]['total']}</th></tr>";
$valormeses = $datos[0]['total'];
}
?>
{
"country": "<?= $meses[$j] ?>",
"visits": <?= $valormeses + $valormesess ?>
},
<?php
}
?>
];
AmCharts.ready(function () {
// SERIAL CHART
chart = new AmCharts.AmSerialChart();
chart.dataProvider = chartData;
chart.categoryField = "country";
chart.startDuration = 1;
// AXES
// category
var categoryAxis = chart.categoryAxis;
categoryAxis.labelRotation = 90;
categoryAxis.gridPosition = "start";
// value
// in case you don't want to change default settings of value axis,
// you don't need to create it, as one value axis is created automatically.
// GRAPH
var graph = new AmCharts.AmGraph();
graph.valueField = "visits";
graph.balloonText = "[[category]]: <b>[[value]]</b>";
graph.type = "column";
graph.lineAlpha = 0;
graph.fillAlphas = 0.8;
chart.addGraph(graph);
// CURSOR
var chartCursor = new AmCharts.ChartCursor();
chartCursor.cursorAlpha = 0;
chartCursor.zoomable = false;
chartCursor.categoryBalloonEnabled = false;
chart.addChartCursor(chartCursor);
chart.creditsPosition = "top-right";
chart.write("chartdiv");
});
</script>
</head>
<body><br><br><br>
<div class="offset-8 col-md-4 "style="z-index: 100; margin: 0% 65%; position: absolute;background: #236780;">
<form method="post" class="">
<table class="table-responsive-lg table table-dark table-hover " >
<tr>
<th> <img src="presentacion/imagenes/buscarpequeño.png"></span></th><td><input class="form-control" type="text" autofocus name="año" placeholder="AÑO" ></td>
<td><input class="btn-primary"type="submit" value="BUSCAR"></td>
</tr>
</table>
</form>
</div>
<div class="container-fluid">
<h2>INDICADOR</h2>
<h4>ventas de Mes</h4>
<div class="container-fluid row">
<div class="col-md-6">
<div id="chartdiv" style="width: 100%; height: 500px; margin: 0% 0%;"></div>
</div>
<div class=" col-md-6" >
<H2>VENTAS DEL AÑO <?=$year?></H2>
<table class="table table-hover table-responsive-lg">
<thead class="table-dark"><th>FECHA</th><th>Total ventas</th></thead>
<?= $lista ?>
</table>
</div>
</div>
</div>
</body>
</html><file_sep><?php
// JSON lookup of one inventory item by barcode (AJAX, from the sales modal).
header('HTTP/1.1 200 OK');
header ('Content-Type: application/json;charset=UTF-8');
require_once '../Clases/ConectorBD.php';
// SECURITY NOTE(review): $codigo is interpolated UNQUOTED into the SQL —
// injection risk, and a non-numeric barcode breaks the statement. Confirm
// barcodes are always numeric, or quote/parameterize the value.
$codigo=$_POST['codigo'];
$cadenaSQL="SELECT idinventario,nombre,descripcion,cantidad,stockminimo,valor FROM inventario where idinventario=$codigo ";
$resultado=ConectorBD::ejecutarQuery($cadenaSQL,'codsami');
echo json_encode($resultado);
?><file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Finalize a sale posted from the sales screen. The client sends one string
// in "cadenaFinal": records separated by "=:=", fields by "==" in the order
// code==name==price==quantity.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$datosCadena = $_POST['cadenaFinal'];
$datosCadenaArray = explode("=:=", $datosCadena);
// Create the sale header row, then recover its id.
// NOTE(review): insert + "select max(idventa)" is not atomic — two
// concurrent sales can grab the same id; a transaction or LAST_INSERT_ID()
// would be safer.
$cadenaSQLVenta = "insert into ventas(fechasistema) values(now())";
ConectorBD::ejecutarQuery($cadenaSQLVenta, null);
$id = ConectorBD::ejecutarQuery("select max(idventa) from ventas", null)[0][0];
// One detail row per sold product, decrementing the inventory stock.
// SECURITY NOTE(review): client-supplied fields are interpolated unescaped
// into every statement below — injection risk.
for ($i = 0; $i < count($datosCadenaArray); $i++) {
$datosArray = explode("==", $datosCadenaArray[$i]);
$cadenaSQLDetalles = "insert into ventasdetalle(idcompras, idventa, cantidad) values('$datosArray[0]', $id, '$datosArray[3]')";
ConectorBD::ejecutarQuery($cadenaSQLDetalles, null);
$cadena11="select cantidad from inventario where idinventario='$datosArray[0]'";
$datos=ConectorBD::ejecutarQuery($cadena11, NULL);
$cantidatot=$datos[0][0]-$datosArray[3];
$cadena111="update inventario set cantidad=$cantidatot where idinventario='$datosArray[0]'";
ConectorBD::ejecutarQuery($cadena111, NULL);
}
header("Location: ../principal.php?CONTENIDO=inicio.php");
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// AJAX endpoint used by the purchase form to warn when a barcode already
// exists. Prints a red <H5> warning, or nothing when the code is free.
// SECURITY NOTE(review): CRITICAL — $cadenasql arrives in the request
// (POST/GET via the variable-extraction loops below) and is executed
// VERBATIM, so any client can run arbitrary SQL against the database.
// This endpoint should accept only the barcode and build the query itself.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
require_once dirname(__FILE__).'/../Clases/Compras.php';
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
foreach ($_GET as $Variable => $Valor) ${$Variable}=$Valor;
$datos=ConectorBD::ejecutarQuery($cadenasql, null);
$lista="";
$contador=1;
if (count($datos)>0){
$lista.="<H5 style='color:red;'>ya existe el codigo de barras en base de datos</H5>";
}
echo $lista;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Detail view of one sale ($idventa arrives via GET): lists every line item
// and the sale's grand total.
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
// SECURITY NOTE(review): register_globals-style extraction; $idventa is
// interpolated unescaped into SQL below — injection risk.
foreach ($_POST as $Variable => $Valor) ${$Variable}=$Valor;
foreach ($_GET as $Variable => $Valor) ${$Variable}=$Valor;
$cadenasql="select nombre,descripcion,ventasdetalle.cantidad,valorventauni,ventasdetalle.cantidad*valorventauni as subtotal from compras,ventasdetalle where idcompras=idcompra and idventa='$idventa'";
$datos= ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista="";
$venta="";
$contador=1;
if(count($datos)>0){
for ($i = 0; $i < count($datos); $i++) {
$lista.="<tr>";
$lista.="<td>{$contador}</td>";
$lista.="<td>{$datos[$i][0]}</td>";
$lista.="<td>{$datos[$i][1]}</td>";
$lista.="<td>{$datos[$i][2]}</td>";
$lista.="<td>$ ". number_format($datos[$i][3])."</td>";
$lista.="<td>$ ". number_format($datos[$i][4])."</td>";
$lista.="</tr>";
$contador=$contador+1;
}
// Grand total is fetched with a second aggregate query over the same join.
$cadenasql1="select sum(ventasdetalle.cantidad*valorventauni)as total from ventasdetalle,compras where idcompras=idcompra and idventa='$idventa' ";
$datos1= ConectorBD::ejecutarQuery($cadenasql1, NULL);
$venta="<H2 class='text-center'> Total venta ". number_format($datos1[0][0])."</H2>";
} else {
$lista.="<tr><td style='color:red;'>No se encuentra productos en esta factura<td><tr>";
}
?>
<br><br><br>
<H2 >DETALLE DE LA VENTA <?= strtoupper($idventa)?> </H2>
<table class="table table-responsive table-hover" style="background: white;">
<tr class="table-dark successx"><th>ITEM</th><th>PRODUCTO</th><th>DESCRIPCION</th><th>CANTIDAD</th><th>VALOR</th><th>SUBTOTAL</th>
</tr>
<?=$lista?>
</table>
<?=$venta?>
<file_sep><?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
// Sales (point-of-sale) screen: barcode input, running detail table and the
// "add product manually" modal. The scripted functions below keep the sale
// in the global string `datosCadena`.
require_once dirname(__FILE__) . '/Clases/ConectorBD.php';
date_default_timezone_set('America/Bogota');
// NOTE(review): format starts with a space and uses 'o' (ISO-8601 year) —
// presumably 'Y' was intended; confirm before changing the display.
$horayfecha=date( ' o-j-Y h:i:s A');
?>
<div class="container-fluid container " id="contenido">
<br> <br><br><br>
<h2 class="text-right text-info" style="margin: -2% 70%; position: absolute;"> VENTAS</h2>
<!-- Hidden-offscreen barcode input: submitting it triggers the AJAX lookup. -->
<div class="" style="z-index: 100; margin: -80%; position: absolute;">
<form method="post" class="" name="formulario" onsubmit="return consultarProducto()">
<input class="form-control" type="text" id="nombre" name="nombre" onblur="this.focus()">
</form>
</div>
<div class="container-fluid">
<h3 class="text-left" style="font-weight: bold; font-size: 30px; font-family: Teamviwer;">DETALLES DE VENTA</h3>
<hr>
<table id='tabla' class="table table-striped " >
<tr class="cabecera">
<th>Item</th><th>Producto</th><th>Cantidad</th><th>Valorunitario</th><th>Subtotal</th>
<th><img src='Presentacion/imagenes/Adicionar.png' width="30" height="30"title='Adicionar' data-toggle='modal' data-target='#exampleModal'></th>
</tr>
<thead>
<tbody class="contenidoProductos">
</tbody>
</table>
<h3 class="text-center" style="font-size: 30px; font-weight: bold;">TOTAL: <span id="subTotalFinal">0</span></h3>
<!-- The whole sale is serialized into the hidden "cadenaFinal" field and
     POSTed to admon/ventaAcutalizar.php. -->
<form name="formularioEnvio" method="post" action="admon/ventaAcutalizar.php" onsubmit="return guradarVenta()">
<input class="cadenaFinal" type="hidden" name="cadenaFinal">
<button class="btn btn-primary center-block" >GUARDAR</button><br>
</form>
<h4>FECHA VENTA:<label><?=$horayfecha?></label> </h4>
</div>
</div>
<!-- Modal -->
<div class="modal " id="exampleModal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title text-center" id="exampleModalLabel ">ADICIONAR PRODUCTO</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<div class="modal-body" id="carga">
<input type="text" class="form-control" placeholder="Buscar por Codigo o Nombre" onchange="buscarproducto()" id="codigoBuscar" />
<!--<button class="btn-primary" onclick="buscarproducto()">Buscar</button>-->
<!--cargar productos-->
<!-- <button onclick="consultarproducto()">cargar productos</button>-->
<table id="cargarbuscador"></table>
<!-- <table id="cargar"></table>-->
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
<button value="" type="button" class="btn btn-primary" id="btn_adicionar" data-dismiss="modal" onclick="agregarManualMente(this.value)">adicionar</button>
</div>
</div>
</div>
</div>
<!-- Fin moda -->
<!-- script -->
<script type="text/javascript">
//<
// Put keyboard focus on the barcode field as soon as the page loads.
function ponleFocus(){
document.getElementById("nombre").focus();
}
ponleFocus();
// Global sale state. Records are joined with "=:=", fields with "==" in the
// order code==name==price==quantity; the literal string "null" = empty sale.
var datosCadena = "null";
var subTotalFinal = 0;
// Look up the scanned barcode (admon/consultarProducto.php) and add the
// product to the sale: a new row when it is not yet in `datosCadena`, or an
// incremented quantity (the whole table is rebuilt) when it already is.
// Always returns false so the barcode form never actually submits.
function consultarProducto() {
var xmlhttp;
if (window.XMLHttpRequest) {
xmlhttp = new XMLHttpRequest();
} else {
// legacy IE fallback
xmlhttp = new ActiveXObject("Microsoft.XMLHTTP");
}
var valores = "idInventario=" + document.formulario.nombre.value;
xmlhttp.onreadystatechange = function () {
if (xmlhttp.readyState === 4 && xmlhttp.status === 200) {// verify the request completed correctly
// Server answers "id==nombre==valor==1", or the string "null" when absent.
var datos = xmlhttp.responseText
if (datos != "null") {
var datosCadenaArray = datosCadena.split("=:=");
var datosArray = datos.split("==");
var indiceBuscarProducto = buscarProducto(datosArray[0]);
if (indiceBuscarProducto < 0) {
// Product not in the sale yet: append it with quantity 1.
if (datosCadena != "null") {
datosCadena += "=:=" + datos;
} else {
datosCadena = datos;
}
var item = datosCadenaArray.length;
if(datosCadenaArray[0] != "null"){
item=item+1
}
subTotalFinal= subTotalFinal+parseInt(datosArray[2]);
document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>1</td><td>" + datosArray[2] + "</td><td>" + datosArray[2] + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
} else {
// Product already present: rebuild the table and the sale string from
// scratch, bumping the matching row's quantity.
document.getElementsByClassName("contenidoProductos")[0].innerHTML="";
datosCadena = "null";
var datosArrrayAntiguos = datosArray;
subTotalFinal = 0;
for (var i = 0; i < datosCadenaArray.length; i++) {
var item = i+1;
var datosArray = datosCadenaArray[i].split("==");
if(i == indiceBuscarProducto){
var cantidad = parseInt(datosArray[3])+parseInt(datosArrrayAntiguos[3]);
var subTotal = parseInt(datosArray[2])*cantidad;
subTotalFinal+=subTotal;
document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+cantidad+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+cantidad;
regrabarCadena(cadenaNueva);
}else{
var subTotal = parseInt(datosArray[2])*parseInt(datosArray[3]);
subTotalFinal+=subTotal;
document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+datosArray[3]+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+datosArray[3];
regrabarCadena(cadenaNueva);
}
}
}
} else {
alert("El producto no esta registrado base de datos");
}
// Refresh the on-screen running total.
document.getElementById('subTotalFinal').innerHTML=subTotalFinal;
}
}
xmlhttp.open("POST", "admon/consultarProducto.php", true);
xmlhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xmlhttp.send(valores);
// Clear and refocus the barcode field, ready for the next scan.
document.formulario.nombre.value="";
$('#nombre').focus();
return false;
}
// Remove (accion == 1) or decrement by one (accion == 0) the product whose
// code is `codigo`, then rebuild both the detail table and `datosCadena`.
// When a decrement reaches zero the row is dropped entirely.
function elimianrProducto(codigo, accion){
var indiceBuscarProducto = buscarProducto(codigo);
var datosCadenaArray = datosCadena.split("=:=");
// Reset state and rebuild everything from the snapshot taken above.
datosCadena = "null";
document.getElementsByClassName("contenidoProductos")[0].innerHTML ="";
subTotalFinal = 0;
for (var i = 0; i < datosCadenaArray.length; i++) {
var item = i+1;
var datosArray = datosCadenaArray[i].split("==");
if(i == indiceBuscarProducto){
if(accion == 0){
var cantidad = parseInt(datosArray[3])-1;
if(cantidad != 0){
var subTotal = parseInt(datosArray[2])*cantidad;
subTotalFinal+=subTotal;
document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+cantidad+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+cantidad;
regrabarCadena(cadenaNueva);
}
}
// accion == 1 (or quantity hit zero): the row is simply not re-added.
}else{
// Unaffected rows are copied back verbatim.
var subTotal = parseInt(datosArray[2])*parseInt(datosArray[3]);
subTotalFinal+=subTotal;
document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+datosArray[3]+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+datosArray[3];
regrabarCadena(cadenaNueva);
}
}
$('#nombre').focus();
document.getElementById('subTotalFinal').innerHTML=subTotalFinal;
}
function buscarProducto(codigo) {
datosArray = datosCadena.split('=:=');
for (var i = 0; i < datosArray.length; i++) {
var codigoA = datosArray[i];
codigoA = codigoA.split("==")[0];
if (codigoA == codigo) {
return i;
}
}
return -1
}
function regrabarCadena(datos){if (datosCadena != "null") {datosCadena += "=:=" + datos;} else {datosCadena = datos;} }
// Form submit handler (the misspelled name is kept because the HTML form
// references it). Copies the cart string into the hidden "cadenaFinal"
// input, then returns false to abort submission when the cart is empty or
// the user cancels the confirmation dialog.
function guradarVenta() {
    document.getElementsByClassName("cadenaFinal")[0].value = datosCadena;
    document.formulario.nombre.focus();
    if (datosCadena == "null") {
        // "null" sentinel: nothing has been added to the cart yet.
        alert("Ingese un producto");  // user-facing text kept verbatim
        return false;
    }
    // BUG FIX: removed an unreachable `$('#nombre').focus()` that sat after
    // the return statements (dead code); also collapsed the redundant
    // if/else around confirm() into a direct return.
    return confirm("Desea realizar la venta?");
}
// Adds a product string (fields split by "==") to the cart table. If the
// product is already in the cart, the whole table is rebuilt with the summed
// quantity; otherwise a new row (quantity 1) is appended. The string "null"
// is the sentinel for "product unavailable" / "empty cart".
function agregarManualMente(productosCadena){
    var datos = productosCadena;
    if (datos != "null") {
        var datosCadenaArray = datosCadena.split("=:=");
        var datosArray = datos.split("==");
        var indiceBuscarProducto = buscarProducto(datosArray[0]);
        if (indiceBuscarProducto < 0) {
            // Product not in the cart yet: append it to the cart string...
            if (datosCadena != "null") {
                datosCadena += "=:=" + datos;
            } else {
                datosCadena = datos;
            }
            // ...and add a row with quantity 1 to the table.
            var item = datosCadenaArray.length;
            if (datosCadenaArray[0] != "null") {
                item = item + 1;
            }
            subTotalFinal = subTotalFinal + parseInt(datosArray[2]);
            document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>1</td><td>" + datosArray[2] + "</td><td>" + datosArray[2] + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
        } else {
            // Product already present: rebuild the whole table, summing the
            // incoming quantity into the matching row.
            document.getElementsByClassName("contenidoProductos")[0].innerHTML = "";
            datosCadena = "null";
            var datosArrrayAntiguos = datosArray;
            subTotalFinal = 0;
            for (var i = 0; i < datosCadenaArray.length; i++) {
                var item = i + 1;
                var datosArray = datosCadenaArray[i].split("==");
                if (i == indiceBuscarProducto) {
                    // NOTE(review): assumes the incoming product string has a
                    // 4th field (quantity) — confirm against the caller.
                    var cantidad = parseInt(datosArray[3]) + parseInt(datosArrrayAntiguos[3]);
                    var subTotal = parseInt(datosArray[2]) * cantidad;
                    subTotalFinal += subTotal;
                    document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+cantidad+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
                    var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+cantidad;
                    regrabarCadena(cadenaNueva);
                } else {
                    var subTotal = parseInt(datosArray[2]) * parseInt(datosArray[3]);
                    subTotalFinal += subTotal;
                    document.getElementsByClassName("contenidoProductos")[0].innerHTML += "<tr><td>" + item + "</td><td>" + datosArray[1] + "</td><td>"+datosArray[3]+"</td><td>" + datosArray[2] + "</td><td>" + subTotal + "</td><td><button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 0)'>-</button> <button class='btn-primary' value='"+datosArray[0]+"' onclick='elimianrProducto(this.value, 1)'>x</button></td><tr>";
                    var cadenaNueva = datosArray[0]+"=="+datosArray[1]+"=="+datosArray[2]+"=="+datosArray[3];
                    regrabarCadena(cadenaNueva);
                }
            }
            document.getElementById('exampleModal').style.display = "none";
        }
    } else {
        alert("El producto no esta disponible");
    }
    document.getElementById('subTotalFinal').innerHTML = subTotalFinal;
    $('#nombre').focus();
    // BUG FIX: the original called the *result* of html() as a function —
    // $('#cargarbuscador').html()("") — which throws a TypeError. The
    // intent is to clear the search-modal contents.
    $('#cargarbuscador').html("");
}
//>
//
//
//para cargar el buscador en la modal
// Reads the code typed in the search modal, asks the server for the matching
// inventory row and renders it as a one-row preview table.
function buscarproducto() {
    var codigo = $('#codigoBuscar').val();
    if (codigo != '') {
        // SECURITY: a raw SQL string (with the user's input interpolated) is
        // built in the browser and executed server-side by
        // admon/consultaproducto.php — anyone can POST arbitrary SQL.
        // TODO: change the endpoint to accept only the search term and build
        // a parameterized query on the server.
        var $cadenasql = "select*From inventario where concat(idinventario,nombre) like'%" + codigo + "%'";
        $.ajax({
            url: 'admon/consultaproducto.php',
            type: 'post',
            data: {cadenasql: $cadenasql},
            success: function (data, textStatus, jqXHR) {
                // BUG FIX: `lista` and `dataArray` leaked as implicit
                // globals; they are now declared locally. A dead, unused
                // `item` computation was also removed.
                var lista = "<table id='tabla' class='table' ><tr><th>Item</th><th>Producto</th><th>Cantidad</th><th>Valorunitario</th><th>Subtotal</th></tr>";
                if (data != "null") {
                    var dataArray = data.split("==");
                    var fila = "<tr><td>"+1+"</td><td>"+dataArray[1]+"</td><td>1</td><td>"+dataArray[2]+"</td><td>"+dataArray[2]+"</td></tr>";
                    lista += fila;
                    // BUG FIX: the closing tag was written as "<table>".
                    lista += "</table>";
                    $('#cargarbuscador').html(lista);
                } else {
                    alert("producto agotado");
                }
                // The "add" button carries the raw row so agregarManualMente
                // can parse it on click.
                document.getElementById("btn_adicionar").value = data;
            }
        });
    } else {
        $('#cargarbuscador').html('<H4>Escribe algo</H4>');
    }
    $('#codigoBuscar').val("");
}
//para cargar todos los producto en la modal
// function consultarproducto() {
// var $cadenasql = "select*From inventario";
// $.ajax({
// url: 'admon/consultaproducto.php',
// type: 'post',
// data: {cadenasql: $cadenasql},
// success: function (data, textStatus, jqXHR) {
// var lista = "<table border='2' id='tabla' class='table' ><tr><th>Item</th><th>Producto</th><th>Cantidad</th><th>Valorunitario</th><th>Subtotal</th>";
// lista += data;
// lista += "<table>";
// $('#cargar').html(lista);
// }
// });
// }
</script><file_sep><?php
/*
 * Purchases listing page: renders every row of the `compras` table with
 * computed purchase/sale totals, plus edit/delete actions and a search box.
 */
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro = "";
if (isset($_POST['nombre']) && $_POST['nombre'] != NULL) {
    $nombresmenu = $_POST['nombre'];
    // SECURITY: $_POST['nombre'] is interpolated straight into the SQL LIKE
    // clause (SQL injection). ConectorBD::ejecutarQuery's second argument
    // looks like a bind-parameter slot — TODO confirm its API and switch to
    // a parameterized query.
    $filtro = " where concat(idcompra,nombre) like'%$nombresmenu%'";
}
$cadenasql = "select*from compras $filtro";
$datos = ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista = "";
$contador = 1;
if (count($datos) > 0) {
    for ($i = 0; $i < count($datos); $i++) {
        // Row layout: #, nombre, descripcion, cantidad, total compra,
        // valor venta unitario, ganancia, valor total, acciones.
        $lista .= "<tr>";
        $lista .= "<td>{$contador}</td>";
        // $lista.='<td><img src="presentacion/lib/barcode.php?text='.$datos[$i][0].'&size=20&codetype=code39&print=true "></td>';
        $lista .= "<td>{$datos[$i][1]}</td>";
        $lista .= "<td>{$datos[$i][2]}</td>";
        $lista .= "<td>{$datos[$i][3]}</td>";
        // BUG FIX: both totals used $datos[0][3] (the FIRST row's quantity)
        // for every row; each row must use its own quantity $datos[$i][3].
        $totalcompra = $datos[$i][5] * $datos[$i][3];
        $lista .= "<td>$". number_format($totalcompra)."</td>";
        $lista .= "<td> ".number_format($datos[$i][6])."</td>";
        $totalventa = $datos[$i][6] * $datos[$i][3] - $totalcompra;
        $lista .= "<td>$". number_format($totalventa)."</td>";
        $lista .= "<td>$". number_format($totalventa + $totalcompra)."</td>";
        $lista .= '<th><a href="principal.php?CONTENIDO=admon/formulariocompra.php&accion=Modificar&idcompra='.$datos[$i][0].'" ><img src="presentacion/imagenes/Modificar.png" title="MODIFICAR" /></a>'
            . '<img src="presentacion/imagenes/Eliminar.png" title="ELIMINAR" onclick="eliminar('.$datos[$i][0].') " /></th>';
        $lista .= "</tr>";
        $contador = $contador + 1;
    }
} else {
    // BUG FIX: the "not found" row closed its cell/row with opening tags
    // ("<td><tr>" instead of "</td></tr>").
    $lista .= "<tr><td style='color:red;'>No se encuentra registrado en la base de datos</td></tr>";
}
?>
<div class="container-fluid">
    <div class="offset-8 col-md-4 "style="z-index: 100; margin:5% 65%; position: absolute;background: #236780;">
        <form method="post" class="">
            <table class="table-responsive-lg table table-dark " >
                <tr>
                    <th> <img src="presentacion/imagenes/buscarpequeño.png"></th><td><input class="form-control" type="text" name="nombre" placeholder="Nombre o codigo" ></td>
                    <td><input class="btn-primary"type="submit" value="BUSCAR"></td>
                </tr>
            </table>
        </form>
    </div>
    <br><br><br>
    <H2 >COMPRAS </H2>
    <table class="table table-responsive table-hover" style="background: white;">
        <tr class="table-dark successx"><th>NUMERO</th><th>NOMBRE</th><th>DESCRIPCION</th><th>CANTIDAD</th><th>VALOR COMPRA UNI</th><th>VALOR VENTA UNI</th> <th>GANANCIA</th> <th> VALOR TOTAL</th>
            <th><a href="principal.php?CONTENIDO=admon/formulariocompra.php&accion=Adicionar" accesskey="a" ><img src="presentacion/imagenes/Adicionar.png" title="ADICIONAR" /></a></th>
        </tr>
        <?=$lista?>
    </table>
</div>
<script>
    // Confirm and delegate row deletion to the update script.
    function eliminar(idcompra){
        if(confirm("Desea eliminar este registro"))
            location="principal.php?CONTENIDO=admon/actualizarcompra.php&accion=ELIMINAR&idcompra="+idcompra;
    }
    // F1 keyboard shortcut jumps to the "add purchase" form.
    $(document).bind('keydown', 'f1', function(){
        location="principal.php?CONTENIDO=admon/formulariocompra.php&accion=Adicionar"
    });
</script>
<file_sep><?php
/*
 * Weekly profit report: aggregates sales per day (last 7 days with sales)
 * and renders them as a table, optionally exported as Excel/Word/PDF.
 */
require_once dirname(__FILE__).'/../Clases/ConectorBD.php';
$filtro = "";
if (isset($_POST['desde']) && $_POST['desde'] != NULL && isset($_POST['hasta']) && $_POST['hasta'] != NULL) {
    $desde = $_POST['desde'];
    $hasta = $_POST['hasta'];
    // SECURITY: the date range comes straight from $_POST into the SQL
    // string (SQL injection). TODO: confirm ConectorBD::ejecutarQuery's
    // second argument supports bound parameters and use it here.
    $filtro = " and fechasistema>='$desde' and fechasistema<='$hasta'";
}
$cadenasql = "SELECT SUBSTRING(fechasistema, -19,10)as fechasistema,sum(ventasdetalle.cantidad), SUM(ventasdetalle.cantidad*compras.valorventauni),SUM(ventasdetalle.cantidad*compras.valorcomprauni)from ventas,ventasdetalle,compras where ventas.idventa=ventasdetalle.idventa and idcompra=idcompras $filtro group by DATE_FORMAT(fechasistema, '%Y-%m-%d') order by fechasistema DESC limit 7;";
$datos = ConectorBD::ejecutarQuery($cadenasql, NULL);
$lista = "";
$resultado = "";
$contador = 1;
$contadortotal = 0;
if (count($datos) > 0) {
    for ($i = 0; $i < count($datos); $i++) {
        // Row: day number, date, units sold, sales total, profit.
        $lista .= "<tr>";
        $lista .= "<td>{$contador}</td>";
        $lista .= "<td>{$datos[$i][0]}</td>";
        $lista .= "<td>{$datos[$i][1]}</td>";
        $lista .= "<td>{$datos[$i][2]}</td>";
        // Profit = sales total - purchase cost total for that day.
        $ganancia = $datos[$i][2] - $datos[$i][3];
        $lista .= "<td>$ganancia</td>";
        $lista .= "</tr>";
        $contador = $contador + 1;
        $contadortotal += $ganancia;
    }
    $resultado .= "<h2 class='text-center'> Total Ganancia $ ". number_format($contadortotal)."</h2>";
} else {
    // BUG FIX: closing tags were written as opening tags ("<td><tr>").
    $lista .= "<tr><td style='color:red;'>No se encuentra registrado en la base de datos</td></tr>";
}
// Optional exports (CLEANUP: three duplicated isset($_GET['export']) blocks
// merged into one chain). Excel/Word serve this same HTML page with a
// download header; PDF renders the table through mPDF and stops.
if (isset($_GET['export'])) {
    if ($_GET['export'] == 'excel') {
        $filename = "reporteganacia.xls";
        header("Content-Type: application/vnd.ms-excel");
        header("Content-Disposition: attachment; filename=".$filename);
    } elseif ($_GET['export'] == 'word') {
        $filename = "reporteganacia.doc";
        header("Content-Type: application/vnd.ms-word");
        header("Content-Disposition: attachment; filename=".$filename);
    } elseif ($_GET['export'] == 'pdf') {
        require_once dirname(__FILE__) . '/../presentacion/lib/mpdf-master/mpdf.php';
        $html = '<H2 >REPORTE DE GANANCIAS POR SEMANA </H2>
        <table class="table table-responsive table-hover " style="background: white;">
        <tr class="table-dark successx"><th>DIA</th><th>FECHA</th><th>PRODUCTOS VENDIDOS</th> <th>TOTAL</th> <th>GANANCIA</th>
        </tr>';
        $html .= $lista;
        $html .= '</table>';
        $mpdf = new mPDF('c');
        $mpdf->WriteHTML($html);
        $mpdf->Output();
        exit();
    }
}
?>
<center>
    <div class="container-fluid ">
        <br><br>
        <H2 >REPORTE DE GANANCIAS POR SEMANA </H2>
        <table class="table table-responsive table-hover " style="background: white;">
            <tr class="table-dark successx"><th>DIA</th><th>FECHA</th><th>PRODUCTOS VENDIDOS</th> <th>TOTAL</th> <th>GANANCIA</th>
            </tr>
            <?=$lista?>
        </table>
        <?=$resultado?>
    </div>
</center>
| e283240446b17251b0e10be2562c4d0b32fa8b6b | [
"PHP",
"INI"
] | 28 | PHP | Alexis-Caratar/codsami | 697d9ab4553ad67b5743db0703167a9d1bcdeedd | 9abdbc2994073aa357cd251c193010eb0776ace4 |
refs/heads/master | <repo_name>abrahamibli/idea_x<file_sep>/src/App.js
import React, { Component } from 'react';
import { BrowserRouter as Router, Route, Switch } from "react-router-dom";
import Login from './components/Login.js';
import Main from './components/Main.js';
import { ProtectedRoute } from './components/protected.route.js';
class App extends Component {
// Para autentificacion en un futuro
constructor() {
super();
this.state = {
loggedInStatus: "NOT_LOGGED_IN",
user: {}
}
}
render() {
return (
<Router >
<div className="app">
<Switch>
<Route exact path={"/"} component={Login} />
<ProtectedRoute exact path={"/main"} component={Main} />
<Route path={"*"} component={() => "404 Not Found"} />
</Switch>
</div>
</Router>
);
}
}
export default App;
<file_sep>/src/components/Navigation.js
import React, { Component } from 'react';
import 'materialize-css/dist/css/materialize.min.css';
import '../App.css';
import { NavLink } from 'react-router-dom';
class Navigation extends Component {
render() {
return <nav>
<div className="nav-wrapper blue-grey darken-1">
<NavLink className="brand-logo" to="/main">Loud Gaming</NavLink>
<ul className="right hide-on-med-and-down">
<li><NavLink className="waves-effect waves-light btn" to="/main/home">Inicio</NavLink></li>
<li><NavLink className="waves-effect waves-light btn" to="#">Coleccion</NavLink></li>
<li><NavLink className="waves-effect waves-light btn" to="#">Requisitos</NavLink></li>
</ul>
</div>
</nav>
}
}
export default Navigation; | d71da7eb632a12c0c9ad4cd8e202c44785d459e2 | [
"JavaScript"
] | 2 | JavaScript | abrahamibli/idea_x | 5f0f618eb20b852269f249e11105923ea0977a98 | 81c0035993aeadbab7f29a5b5ff84249ecd5c9ca |
refs/heads/master | <repo_name>brownsarahm/python-novice-gapminder-files<file_sep>/instructor_resources/post_workshop_README.md
# UCSF Post Workshop Python Downloads
I used github to host the content that you downloaded as zip files this weekend. I used a branch ucsf on Saturday so that your downloaded zip would be named python-novice-gapminder-files-ucsf/. On Sunday, I made a different branch that contained only the new files for the morning download. The current branch ucsfpostworkshop contains my files from both days.
## First Time Download
The command below will download the repository and the branch ucsfpostworkshop so that you can have all of the content.
```bash
git clone https://github.com/brownsarahm/python-novice-gapminder-files.git --branch ucsfpostworkshop
```
You could get to the same place by the following two lines
```bash
git clone https://github.com/brownsarahm/python-novice-gapminder-files.git
git checkout ucsfpostworkshop
```
but git allows us to do it all in one command.
## Just updates
If you made it to the very end of the workshop and already downloaded the content once you can use the following to pull just the updates that I made after the workshop finished.
```bash
cd your/actual/path/python-novice-gapminder-files/
git pull
```
If you downloaded and made changes to the notebooks, your merge may fail, with a warning about which of your changes will be overwritten. Since Jupyter notebooks are not plain text files, it can be hard to merge them. You have a few options for dealing with this.
If you just ran the notebooks and didn't make any changes that are important for you to keep, you can reset to the last commit (which will be one of mine) with
```bash
git reset HEAD --hard
```
Then you should be able to pull.
If you made changes that you want to keep, you can change the filenames of those notebooks, then git will be able to pull my changes without caring that you have other files.
```bash
mv original_notebook.ipynb my_copy_original_notebook.ipynb
```
Now, git pull should work. If you still have problems, submit an issue on this repository that includes all of the steps you did.
<file_sep>/code/ex_library.py
## Library Excercise 1
What function from the math module can you use to calculate a square root without using sqrt?
Since the library contains this function, why does sqrt exist?
# What would you need to do to check these Excercises?
# HINT: how could the %%writefile magic help?
## Library Excercise 2
# Change the order, add, and/or remove lines to make the following print out a
# random base
bases="ACTTGCTTGAC"
import math
import random
len(bases)
len(bases)+1
math.floor(s1)
math.ceil(s1)
print("random base ",bases[])
random.random()*l
## Library Excercise 2
#Match the following print statements with the appropriate library calls.
#Print commands:
print("sin(pi/2) =",sin(pi/2))
print("sin(pi/2) =",m.sin(m.pi/2))
print("sin(pi/2) =",math.sin(math.pi/2))
#Library calls:
from math import sin,pi
import math
import math as m
from math import *
<file_sep>/code/ex_builtin.py
## Built-in Functions Excercise 1
1. Predict what each of the `print` statements in the program below will `print`.
1. Does `max(len(rich), poor)` run or produce an error message? If it runs, does its result make any sense?
easy_string = "abc"
print(max(easy_string))
rich = "gold"
poor = "tin"
print(max(rich, poor))
print(max(len(rich), len(poor)))
max(len(rich), poor)
## Built-in Functions Excercise 2
If Python starts counting from zero, and len returns the number of characters in
a string, what index expression will get the last character in the string name?
(Note: we will see a simpler way to do this in a later episode.)
name = "yournamehere"
name[]
<file_sep>/code/ex_tabular.py
## Data Excercise 1
# Read the data in gapminder_gdp_americas.csv (which should be in the same directory as gapminder_gdp_oceania.csv) into a variable called americas and display its summary statistics.
## Data Excercise 2
After reading the data for the Americas, use help(americas.head) and help(americas.tail)
to find out what DataFrame.head and DataFrame.tail do.
# Display the first three rows of this data
# Display the last three columns of this data?
# (Hint: you may need to change your view of the data.)
## Data Excercise 3
As well as the read_csv function for reading data from a file, Pandas provides a
to_csv function to write dataframes to files. Applying what you’ve learned
about reading from files, write one of your dataframes to a file called
processed.csv. You can use help to get information on how to use to_csv.
<file_sep>/code/ex_plotting.py
## Plotting Excercise 1
# Fill in the blanks below to plot the minimum GDP per capita over time for all
# the countries in Europe.
data_europe = pandas.read_csv('data/gapminder_gdp_europe.csv', index_col='country')
data_europe.____.plot(label='min')
data_europe.____
plt.legend(loc='best')
plt.xticks(rotation=90)
# Modify it again to plot the maximum GDP per capita over time for Europe.
## Plotting Excercise 2
# add annotation for the new keywords introduced in this plot
data_all = pandas.read_csv('data/gapminder_all.csv', index_col='country')
data_all.plot(kind='scatter', x='gdpPercap_2007', y='lifeExp_2007',
s=data_all['pop_2007']/1e6)
<file_sep>/code/ex_types.py
# ex0: split this into multiple cells and choose the either python or markdown for each
## Types Excercise 1
# What type of value is 3.4? How can you find out?
## Types Excercise 2
# What type of value is 3.25 + 4?
## Types Excercise 3
What type of value (integer, floating point number, or character string) would you use to represent each of the following? Try to come up with more than one good answer for each problem. For example, in # 1, when would counting days with a floating point variable make more sense than using an integer?
1. Number of days since the start of the year.
1. Time elapsed since the start of the year.
1. Serial number of a piece of lab equipment.
3. A lab specimen’s age.
1. Current population of a city.
1. Average population of a city over time.
Note: _see in markdown that those numbers display as incrementing even though in
the plain source they're not in order! This means you can make a list and then add
a new item in the middle of the list without having to change the numbers_
## Types Excercise 4
# Which of the following will have a value of 2.0?
# Note: there may be more than one right answer.
first = 1.0
second = "1"
third = "1.1"
a = first + float(second)
b = float(second) + float(third)
c = first + int(third)
d = first + int(float(third))
e= int(first) + int(float(third))
f = 2.0 * second
## Types Excercise 5
Python provides complex numbers, which are written as 1.0+2.0j. If val is an imaginary number, its real and imaginary parts can be accessed using dot notation as val.real and val.imag.
1. Why do you think Python uses j instead of i for the imaginary part?
1. What do you expect 1+2j + 3 to produce?
1. What do you expect ‘4j’ to be? What about 4 j or `4 + j’?
<file_sep>/code/ex_dataframe.py
## DataFrames Excercise 1
# Write an expression to find the Per Capita GDP of Serbia in 2007.
## DataFrames Excercise 2: DataFrame slicing
1. Do the two statements below produce the same output?
1. Based on this, what rule governs what is included (or not) in numerical slices and named slices in Pandas?
print(data.iloc[0:2, 0:2])
print(data.loc['Albania':'Belgium', 'gdpPercap_1952':'gdpPercap_1962'])
## DataFrames Excercise 3
# Improve the following code with comments and better variable names
first = pandas.read_csv('data/gapminder_all.csv', index_col='country')
second = first[first['continent'] == 'Americas']
third = second.drop('Puerto Rico')
fourth = third.drop('continent', axis = 1)
fourth.to_csv('result.csv')
# bonus: is your current working directory the best place to save processed data?
## DataFrames Excercise 4
# Add documentation to the following excerpt and make yourself some notes about
# what `idxmin` and `idxmax` and where you would use them
data = pandas.read_csv('data/gapminder_gdp_europe.csv', index_col='country')
print(data.idxmin())
print(data.idxmax())
## DataFrames Excercise 5
#Generate the following stats as
# GDP per capita for all countries in 1982.
# GDP per capita for Denmark for all years.
# GDP per capita for all countries for years after 1985.
# GDP per capita for each country in 2007 as a multiple of GDP per capita for
# that country in 1952.
<file_sep>/code/ex_vars.py
# ex0: split this into multiple cells and choose the either python or markdown for each
## Ex 1
# What is the final value of position in the program below? (Try to predict the value without running the program, then check your prediction.)
initial = "left"
position = initial
initial = "right"
## Ex 2
# If you assign a = 123, what happens if you try to get the second digit of a?
a=123
## Ex 3
Which is a better variable name, m, min, or minutes? Why? Hint: think about which code you would rather inherit from someone who is leaving the lab:
ts = m * 60 + s
tot_sec = min * 60 + sec
total_seconds = minutes * 60 + seconds
## Ex 4
# Think about what this will print before you uncomment and run it?
# atom_name = 'carbon'
# print('atom_name[1:3] is:', atom_name[1:3])
<file_sep>/code/ex_lists.py
## Lists Excercise 1
Complete the code below to generate the following output
```
first time: [1, 3, 5]
second time: [3, 5]
```
values = ____
values.____(1)
values.____(3)
values.____(5)
print('first time:', values)
values = values[____]
print('second time:', values)
## Lists Excercise 2
# run this code, then answer the questions below
print('string to list:', list('tin'))
print('list to string:', ''.join(['g', 'o', 'l', 'd']))
Explain in simple terms what list('some string') does.
What does '-'.join(['x', 'y']) generate?
## Lists Excercise 3
# use this code to answer the questions below
element = 'helium'
print(element[-1])
1. How does Python interpret a negative index?
1. If a list or string has N elements, what is the most negative index that can
safely be used with it, and what location does that index represent?
1. If values is a list, what does del values[-1] do?
How can you display all elements but the last one without changing values? (Hint: you will need to combine slicing and negative indexing.)
<file_sep>/code/ex_swap.py
# Fill the table showing the values of the variables in this program after each statement is executed.
# Command # Value of x # Value of y # Value of swap #
x = 1.0 # # # #
y = 3.0 # # # #
swap = x # # # #
x = y # # # #
y = swap # # # #
# Bonus: split the cells and print out the values after each step to check yourself
<file_sep>/code/notebook_hints.md
## Basic Shortcuts
`Esc` will take you into command mode where you can navigate around your notebook with arrow keys.
### While in command mode:
`A` to insert a new cell above the current cell,
`B` to insert a new cell below.
`M` to change the current cell to Markdown,
`Y` to change it back to code
`D` + `D` (press the key twice) to delete the current cell
`Enter` will take you from command mode back into edit mode for the given cell.
### while in edit mode:
`Ctrl` + `Shift` + `-` will split the current cell into two from where your cursor is.
`Shift` + `Enter` will run the current cell
## Full Shortcut Listing
Press Cmd + Shift + P (or Ctrl + Shift + P on Linux and Windows) to open the command palette, which lists all available shortcuts.
<file_sep>/code/ex_conditional.py
## Conditionnal Excercise 1
# fill in the blanks to make result have 0 where original has negative values
# and 1 where original has positive values
original = [-1.5, 0.2, 0.4, 0.0, -1.3, 0.4]
result = ____
for value in original:
if ____:
result.append(0)
else:
____
print(result)
## Conditionnal Excercise 2
#Modify this program so that it only processes files with fewer than 50 records.
import glob
import pandas
for filename in glob.glob('data/*.csv'):
contents = pandas.read_csv(filename)
____:
print(filename, len(contents))
## Conditionnal Excercise 3
# Functions will often contain conditionals. Here is a short example that will
# indicate which quartile the argument is in based on hand-coded values for
# the quartile cut points.
def calculate_life_quartile(exp):
    """Return the quartile (1-4) that life expectancy ``exp`` falls into.

    Cut points are hard-coded for this exercise. Returns ``None`` for
    values that compare false against every bound (e.g. ``nan``).
    """
    if exp < 58.41:
        # First quartile.
        return 1
    elif exp < 67.05:
        # Second quartile. CLEANUP: the redundant lower-bound checks
        # (e.g. `exp >= 58.41 and ...`) are implied by elif and removed.
        return 2
    elif exp < 71.70:
        # Third quartile.
        return 3
    elif exp >= 71.70:
        # Fourth quartile.
        return 4
    else:
        # Bad data: e.g. NaN never satisfies any comparison above.
        return None
calculate_life_quartile(62.5)
# That function would typically be used within a for loop, but Pandas has a
# different, more efficient way of doing the same thing, and that is by applying
# a function to a dataframe or a portion of a dataframe. Here is an example,
# using the definition above.
data = pd.read_csv('data/gapminder_gdp_americas.csv')
data['life_qrtl'] = data['lifeExp'].apply(calculate_life_quartile)
# Apply this to a second data set and plot them to compare
# Create a function that calculates the life quartiles for a dataset
# Modify your funtion to take two parameters, so that it can use the life
# expectancy that you computed
<file_sep>/code/ex_loops.py.1
## Loop Excercise
use the markdown table template below to trace the output of the cell below. add
lines to the table as needed
| Line no | Variables |
|---------|----------------------|
| 1 | |
| | |
| | |
| | |
_hint: in command mode of a notebook, you can turn on line numbers of a code
cell by pressing l_
total = 0
for char in "tin":
total = total + 1
## Loop Excercise 2 -
# fill in the blanks to complete each of the following
# reverse the string and print it
original = "tin"
result = ____
for char in original:
result = ____
print(result)
# Total length of the strings in the list: ["red", "green", "blue"] => 12
total = 0
for word in ["red", "green", "blue"]:
____ = ____ + len(word)
print(total)
# List of word lengths: ["red", "green", "blue"] => [3, 5, 4]
lengths = ____
for word in ["red", "green", "blue"]:
lengths.____(____)
print(lengths)
# Concatenate all words: ["red", "green", "blue"] => "redgreenblue"
words = ["red", "green", "blue"]
result = ____
for ____ in ____:
____
print(result)
# Create acronym: ["red", "green", "blue"] => "RGB"
# write the whole thing
## List Excercise 3
1. Read the code below and try to identify what the errors are without running it.
1. Run the code, and read the error message. What type of error is it?
1. Fix the error.
seasons = ['Spring', 'Summer', 'Fall', 'Winter']
print('My favorite season is ', seasons[4])
| fe2060d7df78f044ba645b4bbda4314c70a0ae70 | [
"Markdown",
"Python"
] | 13 | Markdown | brownsarahm/python-novice-gapminder-files | 712a6c179cf110f41140eae476340922e510cce8 | d0bcc5441530b1a211604208b6a2c24b58a97650 |
refs/heads/master | <repo_name>serply-inc/linkedin_scraper<file_sep>/linkedin_scraper.py
import random
import argparse
import requests
import re
# Command-line interface. NOTE(review): parsed at import time (not inside a
# main() guard) so `args` is a module global used by the __main__ block below;
# importing this module from elsewhere would also trigger argument parsing.
parser = argparse.ArgumentParser(description='Searches Google For Linkedin Profiles')
parser.add_argument('--keyword', type=str, help='keywords to search')
parser.add_argument('--limit', type=int, help='how many profiles to scrape')
args = parser.parse_args()
class LinkedinScraper(object):
def __init__(self, keyword, limit):
"""
:param keyword: a str of keyword(s) to search for
:param limit: number of profiles to scrape
"""
self.keyword = keyword.replace(' ', '%20')
self.all_htmls = ""
self.server = 'www.google.com'
self.quantity = '100'
self.limit = int(limit)
self.counter = 0
def search(self):
"""
perform the search
:return: a list of htmls from Google Searches
"""
# choose a random user agent
user_agents = [
'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0) chromeframe/10.0.648.205',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.19 (KHTML, like Gecko) Ubuntu/11.10 Chromium/18.0.1025.142 Chrome/18.0.1025.142 Safari/535.19',
'Mozilla/5.0 (Windows NT 5.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00'
]
while self.counter < self.limit:
headers = {'User-Agent': random.choice(user_agents)}
url = 'http://google.com/search?num=100&start=' + str(self.counter) + '&hl=en&meta=&q=site%3Alinkedin.com/in%20' + self.keyword
resp = requests.get(url, headers=headers)
if ("Our systems have detected unusual traffic from your computer network.") in resp.text:
print("Running into captchas")
return
self.all_htmls += resp.text
self.counter += 100
def parse_links(self):
reg_links = re.compile(r"url=https:\/\/www\.linkedin.com(.*?)&")
self.temp = reg_links.findall(self.all_htmls)
results = []
for regex in self.temp:
final_url = regex.replace("url=", "")
results.append("https://www.linkedin.com" + final_url)
return results
def parse_people(self):
"""
:param html: parse the html for Linkedin Profiles using regex
:return: a list of
"""
reg_people = re.compile(r'">[a-zA-Z0-9._ -]* -|\| LinkedIn')
self.temp = reg_people.findall(self.all_htmls)
print(self.temp)
results = []
for iteration in (self.temp):
delete = iteration.replace(' | LinkedIn', '')
delete = delete.replace(' - LinkedIn', '')
delete = delete.replace(' profiles ', '')
delete = delete.replace('LinkedIn', '')
delete = delete.replace('"', '')
delete = delete.replace('>', '')
delete = delete.strip("-")
if delete != " ":
results.append(delete)
return results
if __name__ == "__main__":
    # Run a full scrape using the CLI arguments parsed at import time.
    scraper = LinkedinScraper(keyword=args.keyword, limit=args.limit)
    scraper.search()
    links = scraper.parse_links()
    print(links)
    profiles = scraper.parse_people()
print(profiles)<file_sep>/README.md
# Linkedin Scraper
A simple Linkedin Scraper that uses Google to search for Linkedin Profiles
## Requirements
* python2.7+
* pip
* requests
## Installation
```bash
pip install -r requirements.txt
```
## Usage
```bash
python linkedin_scraper.py --keyword "nasa" --limit 500
```
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
[MIT](https://choosealicense.com/licenses/mit/) | 04997e3a9d772acfb69d2b9fe9a0c82fbf464d5d | [
"Markdown",
"Python"
] | 2 | Python | serply-inc/linkedin_scraper | 5a2ebffdd86dced98a28a0ed960494197b7f91fa | 9cedf6276a96c5087a43f5ee92578021495997ce |
refs/heads/master | <repo_name>Blackhart/unity3D-csharp-racingGame-Tracks<file_sep>/src/GUI/ScoreRanking.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
public class ScoreRanking : MonoBehaviour
{
    // NGUI placeholders/labels assigned in the Unity inspector.
    public Transform[] placeholders;
    public UILabel[] labels;

    // Shared score table: player network ID -> score. Static so that
    // ChangeScore can be invoked from network callbacks without a reference.
    static public Dictionary<int, int> players;

    private bool init = false;
    private int localID;
    private Filin localPlayer;

    private void Start()
    {
        // Intentionally empty: setup is deferred to Init(), which runs once
        // the race has started and all players exist (see Update()).
    }

    public void Init()
    {
        GameObject network = GameObject.Find("Network");
        if (network)
        {
            players = new Dictionary<int, int>();
            GameObject[] lPlayers = GameObject.FindGameObjectsWithTag("Player");
            foreach (GameObject lObject in lPlayers)
            {
                if (players.ContainsKey(lObject.GetComponent<NetworkCharacter>().ID) == false)
                    players.Add(lObject.GetComponent<NetworkCharacter>().ID, 0);
                // Remember which entry belongs to the locally-controlled car.
                if (lObject.GetPhotonView().isMine)
                {
                    this.localID = lObject.GetComponent<NetworkCharacter>().ID;
                    this.localPlayer = lObject.GetComponent<Filin>();
                }
            }
            // Show one label per connected player, hide the unused ones.
            for (int i = 0; i < lPlayers.Length; ++i)
                this.labels[i].alpha = 1f;
            for (int i = lPlayers.Length; i < this.labels.Length; ++i)
                this.labels[i].alpha = 0f;
        }
        else
            this.enabled = false;
        this.init = true;
    }

    public void Reset()
    {
        this.init = false;
    }

    private void Update()
    {
        if (this.init && GameLogic.isRaceStart)
        {
            players[this.localID] = Mathf.FloorToInt(this.localPlayer.m_score);
            // BUG FIX: the original re-assigned `players` to a Dictionary
            // built from the sorted pairs and then read it back with
            // Keys.ElementAt/Values.ElementAt, but Dictionary<K,V> does not
            // guarantee enumeration order (and ElementAt in a loop is O(n²)).
            // Sort into a list instead and leave the shared dictionary alone.
            var ranking = players.OrderByDescending(kvp => kvp.Value).ToList();
            for (int i = 0; i < ranking.Count; ++i)
                this.labels[i].text = ranking[i].Key + "P - " + ranking[i].Value + " Go";
        }
        else if (GameLogic.isRaceStart)
        {
            this.Init();
        }
    }

    // Called from network code to record another player's score broadcast.
    static public void ChangeScore(int score, int ID)
    {
        if (players != null)
            players[ID] = score;
    }
}
<file_sep>/src/Controlleur/FXManager.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Holds references to the player's particle and turbo effects and makes
/// sure every one of them starts the scene switched off.
/// </summary>
public class FXManager : MonoBehaviour
{
public GameObject collisionLeft;
public GameObject collisionRight;
public GameObject collisionFront;
public GameObject discharge;
public GameObject grabCollectiblePart1;
public GameObject grabCollectiblePart2;
public GameObject turbo;
// Stop every particle effect and hide the turbo on startup.
void Start()
{
GameObject[] particleEffects =
{
this.collisionLeft,
this.collisionRight,
this.collisionFront,
this.discharge,
this.grabCollectiblePart1,
this.grabCollectiblePart2
};
foreach (GameObject effect in particleEffects)
effect.particleSystem.Stop();
this.turbo.SetActive(false);
}
}
<file_sep>/src/GUI/WrongWayDisplay.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Shows a pulsating "wrong way" warning when the local player is facing
/// against the direction of the "plateformRail" iTween path.
/// </summary>
public class WrongWayDisplay : MonoBehaviour
{
// Pulsating GUI item enabled while the player faces the wrong way.
public PulsatingItem wrongWayLabel;
// Dot-product cutoff between the player's forward vector and the local path
// direction; below it (default -0.7, i.e. facing mostly backwards) the warning shows.
public float dotProductThreshold = -0.7f;
// Transform of the locally controlled player.
private Transform localPlayer;
private bool init = false;
private void Start()
{
this.Init();
}
/// <summary>
/// Resolves the local player's transform, either through the network
/// GameLogic (multiplayer) or the "Player" object (solo); disables the
/// script when no player can be found.
/// </summary>
public void Init()
{
GameObject network = GameObject.Find("Network");
if (network)
{
GameLogic gameLogic = network.GetComponent<GameLogic>();
if (gameLogic)
{
GameObject localPlayer = gameLogic.Player;
if (localPlayer == null)
this.enabled = false;
else
this.localPlayer = localPlayer.GetComponent<Transform>();
}
}
else
{
GameObject localPlayer = GameObject.Find("Player");
if (localPlayer == null)
this.enabled = false;
else
this.localPlayer = localPlayer.GetComponent<Transform>();
}
this.init = true;
}
private void Update()
{
if (this.init)
{
// Project the player onto the rail, then compare his facing with the
// local path direction (sampled slightly ahead of the closest point).
Vector3 closest = Vector3.zero;
float percent = 0f;
iTween.ClosestPointOnPath(iTweenPath.GetPath("plateformRail"), this.localPlayer.position, 0.01f, out closest, out percent);
if (Vector3.Dot((this.localPlayer.forward).normalized, (iTween.PointOnPath(iTweenPath.GetPath("plateformRail"), percent + 0.001f) - closest).normalized) < this.dotProductThreshold)
{
if (!this.wrongWayLabel.enabled)
this.wrongWayLabel.enabled = true;
}
else
this.wrongWayLabel.enabled = false;
}
}
}
<file_sep>/src/Gameplay/Filin.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
#region Enum
// State machine values for the "filin" (grappling line), replicated over Photon.
public enum eFilinState
{
// Line attached to the platform: rendered, sound playing, score accruing.
_ACTIVE_ = 0,
// Line released by the player (button up); multiplier falls back to the last tier.
_DISACTIVE_,
// Line forcibly cut (line of sight blocked or platform taken); multiplier timer resets.
_CUT_
}
#endregion
/// <summary>
/// The "filin" (tether/grapple line): lets a player hook onto the moving
/// platform to accrue score over time, with a LineRenderer visual and an
/// FMOD sound. The line state is replicated over Photon; only the owning
/// client (or solo play) computes state and score.
/// </summary>
public class Filin : Photon.MonoBehaviour
{
#region public members
public GameObject m_target; // the platform to hook onto
public float m_maxDist; // maximum distance (in units) at which the player can hook onto the platform
public float m_scoreAdd; // points added to the player per second
public float m_score // player's score
{
private set
{
_score = value;
}
get
{
return _score;
}
}
// Current score-multiplier progress derived from the uninterrupted hook time.
public float m_coeff
{
get
{
return _scoreMultScript.getProgress(_time);
}
}
public GUIText m_indicBonus; // bonus multiplier text
public eFilinState _filinState;
// FMOD event played while the line is attached.
public string eventName;
public FmodEventAsset asset;
public FmodEventAudioSource source;
public float WidthFilin = 0.01f;
#endregion
#region private members
private ScoreMultiplicateur _scoreMultScript;
private PlateformData _plateformData;
private RaycastHit _rayCastData;
// "FilinPos" child: the line's attach point on the player.
private Transform _filinPos;
private float _score;
// Uninterrupted attach time; drives the score multiplier.
private float _time;
public LineRenderer _lineRender;
private AnimationController animationController;
#endregion
#region Init
void Awake()
{
// Bind the FMOD event to the audio source up front.
if (Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source) == false)
Debug.LogError("Can't load event: " + this.eventName);
else
{
this.source.playOnAwake = false;
}
}
public void Start()
{
// In solo mode there is no network bootstrap, so initialise immediately.
if (this.GetComponent<Controlleur>().noNetwork)
{
Init();
}
}
/// <summary>Caches components, configures the line renderer and resets score/time.</summary>
public void Init()
{
_filinState = eFilinState._DISACTIVE_;
/*_lineRender = gameObject.AddComponent<LineRenderer>();
_lineRender.material.color = Color.red;*/
_lineRender.SetWidth(0.01f, 0.01f);
_lineRender.SetVertexCount(2);
_lineRender.enabled = false;
_filinPos = transform.FindChild("FilinPos");
_score = 0.0f;
_time = 0.0f;
_scoreMultScript = m_target.GetComponent<ScoreMultiplicateur>();
_plateformData = m_target.GetComponent<PlateformData>();
updateTextBonus();
this.animationController = this.GetComponent<AnimationController>();
}
#endregion
#region Update
void LateUpdate()
{
// Not initialised yet (Init() not run or required components missing).
if (!_filinPos || !_scoreMultScript || !_plateformData)
return;
// Only the owning client (or solo play) decides the next state; remote
// clients receive it through OnPhotonSerializeView.
if (photonView.isMine && GameLogic.isRaceStart || this.GetComponent<Controlleur>().noNetwork)
getState();
if (_filinState == eFilinState._ACTIVE_)
activate();
else if (_filinState == eFilinState._DISACTIVE_)
disactive();
else if (_filinState == eFilinState._CUT_)
cut();
if (photonView.isMine && GameLogic.isRaceStart || this.GetComponent<Controlleur>().noNetwork)
updateTextBonus();
}
#endregion
#region GameLogic
// Attached: draw the line, claim the platform; owner also plays sound and accrues score.
void activate()
{
_lineRender.enabled = true;
_lineRender.SetWidth(WidthFilin, WidthFilin);
_lineRender.SetPosition(0, _filinPos.position);
_lineRender.SetPosition(1, m_target.transform.position);
_plateformData.m_Player = gameObject;
if (photonView.isMine || this.GetComponent<Controlleur>().noNetwork)
{
this.source.Play();
addScore();
}
}
// Released: hide the line, free the platform; the multiplier timer falls
// back to the last reached tier instead of resetting.
void disactive()
{
if (this.source.getStatus() != "Stopped")
{
this.source.Stop();
}
_lineRender.enabled = false;
if (_plateformData.m_Player == gameObject)
_plateformData.m_Player = null;
if (photonView.isMine || this.GetComponent<Controlleur>().noNetwork)
_time = _scoreMultScript.getLastPalier(_time);
}
// Cut: same cleanup as released, but the multiplier timer resets to zero.
void cut()
{
if (this.source.getStatus() != "Stopped")
{
this.source.Stop();
}
_lineRender.enabled = false;
if (_plateformData.m_Player == gameObject)
_plateformData.m_Player = null;
if (photonView.isMine || this.GetComponent<Controlleur>().noNetwork)
_time = 0.0f;
}
// Reads the "Filin" button and line-of-sight to decide the next state.
void getState()
{
bool lStateMan = Input.GetButton("Filin");
if (!lStateMan)
_filinState = eFilinState._DISACTIVE_;
else
{
// Attach only if not already cut, the platform is free (or ours),
// and the platform is visible from the attach point.
if (_filinState != eFilinState._CUT_ && (!_plateformData.m_Player || _plateformData.m_Player == gameObject) &&
LaunchRay(_filinPos.position, m_target.transform.position - _filinPos.position))
{
_filinState = eFilinState._ACTIVE_;
this.animationController.grap = true;
}
else
_filinState = eFilinState._CUT_;
}
}
// Orders raycast hits by ascending distance (duplicate distances get their
// key nudged by 0.01 so they can coexist in the dictionary).
RaycastHit[] SortTargetsByDistance(RaycastHit[] hit)
{
int count = hit.Length;
RaycastHit[] ret = hit;
Dictionary<float, RaycastHit> lDico = new Dictionary<float, RaycastHit>();
for (int i = 0; i < count; i++)
{
float lKey = hit[i].distance;
while (lDico.ContainsKey(lKey))
lKey += 0.01f;
lDico.Add(lKey, hit[i]);
}
lDico = lDico.OrderBy(kvp => kvp.Key).ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
int index = 0;
foreach (KeyValuePair<float, RaycastHit> elem in lDico)
{
ret[index] = elem.Value;
index++;
}
return ret;
}
// True when the first relevant hit along the ray is the target platform,
// i.e. nothing blocks the line between the player and the platform.
bool LaunchRay(Vector3 pStart, Vector3 pEnd)
{
int lLayer = 1 << 11;
int count;
lLayer = ~lLayer;
RaycastHit[] hit = Physics.RaycastAll(pStart, pEnd, /*out _rayCastData,*/ m_maxDist/*, lLayer*/);
hit = this.SortTargetsByDistance(hit);
count = hit.Length;
for (int i = 0; i < count; i++)
{
if (hit[i].collider.gameObject == this.gameObject || hit[i].collider.gameObject == m_target)
{
if (hit[i].collider.gameObject == m_target)
{
return true;
}
}
else
return false;
}
return false;
}
// Accrues time-scaled, multiplier-scaled score and broadcasts it to peers.
void addScore()
{
_time += Time.deltaTime;
m_score += m_scoreAdd * Time.deltaTime * _scoreMultScript.getCoeff(_time);
if (photonView.isMine)
{
GameLogic.SendScore((int)m_score, this.GetComponent<NetworkCharacter>().ID);
}
}
/// <summary>Returns the player currently hooked to the platform, or null.</summary>
public GameObject getPlayer()
{
if (_plateformData == null)
return null;
return _plateformData.m_Player;
}
#endregion
#region GUI
// Refreshes the "xN" bonus text from the current multiplier.
void updateTextBonus()
{
float lCoeff;
lCoeff = _scoreMultScript.getCoeff(_time);
if (m_indicBonus != null)
m_indicBonus.text = "x" + lCoeff;
}
#endregion
#region Reseau
// Photon replication of the filin state (owner writes, remotes read).
public void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info)
{
if (stream.isWriting)
{
// We own this player: send the others our data
stream.SendNext(this._filinState);
}
else
{
// Network player, receive data
this._filinState = (eFilinState)stream.ReceiveNext();
}
}
#endregion
#region Tools
// Editor gizmo: draws the grab range around the line's attach point.
private void OnDrawGizmos()
{
Color color = Gizmos.color;
Gizmos.color = Color.red;
Gizmos.DrawWireSphere(transform.FindChild("FilinPos").position, this.m_maxDist);
Gizmos.color = color;
}
#endregion
}<file_sep>/src/GUI/Chrono.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Race chronometer GUI: accumulates elapsed wall-clock time once started
/// and renders it as "MM:SS:mmm" into a UILabel every frame.
/// </summary>
public class Chrono : MonoBehaviour
{
// Wall-clock timestamp of the previous frame, used to compute the per-frame delta.
private System.DateTime current;
// Total elapsed time since the chrono was (re)started.
private System.TimeSpan diff;
// Set to true to make the chrono run; set automatically in solo mode.
public bool started = false;
// Label that receives the formatted time string.
public UILabel label;
// Use this for initialization
void Start()
{
// In solo (no-network) mode the chrono starts immediately. The Find()
// result is null-checked: in network games no object named "Player" may
// exist yet at Start(), which previously threw a NullReferenceException.
GameObject player = GameObject.Find("Player");
if (player != null && player.GetComponent<Controlleur>().noNetwork == true)
{
this.current = System.DateTime.Now;
this.started = true;
}
}
// Update is called once per frame
void Update()
{
if (this.started == true)
{
this.diff += System.DateTime.Now - this.current;
this.current = System.DateTime.Now;
// D3 keeps the milliseconds field a fixed three digits (0-999);
// the previous D2 format made the display width jitter between
// two and three digits.
this.label.text = string.Format("{0:D2}:{1:D2}:{2:D3}", this.diff.Minutes, this.diff.Seconds, this.diff.Milliseconds);
}
}
/// <summary>Resets the accumulated time and the frame timestamp to now.</summary>
public void ResetTimer()
{
this.current = System.DateTime.Now;
this.diff = System.TimeSpan.Zero;
}
}
<file_sep>/src/Camera/PointToLook.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Moves the camera's look-at target along the "PointToLook" iTween path,
/// advancing or rewinding it according to the player's progress.
/// </summary>
public class PointToLook : MonoBehaviour
{
// Reference length used to normalise progress along the path.
public float distanceLevel = 100;
// Player the look-at point follows.
public GameObject player;
// Initial progress offset of the look-at point.
public float distancePlayer = 0;
// Current progress of the look-at point along the path.
public float distanceParcouru;
// Use this for initialization
void Start ()
{
// Guard against division by zero when normalising progress.
if (this.distanceLevel == 0.0f)
this.distanceLevel = 1.0f;
distanceParcouru = distancePlayer;
transform.position = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), this.distanceParcouru / distanceLevel);
}
// Update is called once per frame
void Update ()
{
float tmp;
if (this.player)
{
// Advance by a fraction of the player's signed distance relative to the
// look-at point, clamped to [0, distanceLevel].
tmp = this.getDistance();
this.distanceParcouru += (tmp / 100);
if (this.distanceParcouru < 0)
this.distanceParcouru = 0;
if (this.distanceParcouru > this.distanceLevel)
this.distanceParcouru = this.distanceLevel;
transform.position = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), this.distanceParcouru / distanceLevel);
}
}
// Signed distance of the player ahead of (positive) or behind (negative)
// the look-at point, obtained by projecting the player's offset onto the
// local path direction (cosine of the angle times the distance).
private float getDistance()
{
Vector3 next;
Vector3 last;
Vector3 tmpPlayer;
float angle;
float distance;
float ret;
next = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), (this.distanceParcouru + 1f) / distanceLevel);
last = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), this.distanceParcouru / distanceLevel);
tmpPlayer = player.transform.position;
// Flatten the comparison to the path point's horizontal plane.
tmpPlayer.y = last.y;
next = next - last;
last = tmpPlayer - last;
angle = Vector3.Angle(next, last) * Mathf.Deg2Rad;
distance = Mathf.Abs(Vector3.Distance(transform.position, tmpPlayer));
ret = Mathf.Cos(angle) * distance;
return ret;
}
}
<file_sep>/src/Gameplay/LoadNextLevel.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Level-streaming trigger: counts players inside a zone and, once everyone
/// has passed through, activates the next road/wall segments and deactivates
/// the previous ones.
/// </summary>
public class LoadNextLevel : MonoBehaviour
{
// Next wall/road segment to activate once the zone is empty.
public GameObject murToLoad;
public GameObject roadToLoad;
// Previous wall/road segment to deactivate when the swap happens.
public GameObject murToUnload;
public GameObject roadToUnload;
// True for the first trigger of the track (players start inside it).
public bool firstPart;
// True when running a networked game.
public bool network;
// Players currently counted inside (or still expected in) the zone.
private int nbPlayersInZone = 0;
void Start()
{
if (this.firstPart == true)
{
if (this.network == false)
{
// Solo: the single player starts in the first zone.
this.nbPlayersInZone = 1;
}
}
}
/// <summary>Network setup: every connected player starts counted in the zone.</summary>
public void Init()
{
this.nbPlayersInZone = RandomMatchmaker.playersList.Count;
}
void OnEnable()
{
// Re-enabling the segment means a player is expected in it again.
this.nbPlayersInZone++;
}
// Swap segments once the zone is empty; otherwise make sure the previous
// segment is still active for stragglers.
void VerifNbPlayersInZone()
{
if (this.nbPlayersInZone == 0)
{
this.murToLoad.SetActive(true);
this.roadToLoad.SetActive(true);
this.murToUnload.SetActive(false);
this.roadToUnload.SetActive(false);
}
else
{
if (this.murToUnload.activeInHierarchy == false)
{
this.murToUnload.SetActive(true);
this.roadToUnload.SetActive(true);
}
}
}
void OnTriggerEnter(Collider other)
{
// Ignore non-player and trigger colliders.
if (other.CompareTag("Player") == true && !other.isTrigger)
{
// The crossing direction (angle between the player's and the trigger's
// forward vectors) tells leaving apart from (re-)entering.
float angle = Vector3.Angle(other.transform.forward, this.transform.forward);
if (angle > 0.0f && angle <= 90.0f)
{
this.nbPlayersInZone--;
this.VerifNbPlayersInZone();
}
else
{
this.nbPlayersInZone++;
this.VerifNbPlayersInZone();
}
}
}
}
<file_sep>/src/Controlleur/stabilityBoard.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Hoverboard physics helper: keeps the board aligned with the ground,
/// applies the hover force, extra downward pull while airborne, and jumping.
/// All forces are applied in FixedUpdate through the rigidbody.
/// </summary>
public class stabilityBoard : MonoBehaviour
{
public float floatSpeed = 2f; // upward hover force applied at low speed
public float hoverboardHeight; // target ride height above the road
public float lerpAngle = 0.5f; // interpolation factor for ground alignment
public float forceJump = 10f; // impulse applied on jump
public float forceDown = 10f; // extra downward acceleration while airborne
public float anglePente = 45f; // max slope angle (only used by the commented-out check below)
public string nameRoad = "Ground"; // tag identifying drivable ground
private Controlleur control;
private bool onGround = false;
void Start()
{
this.control = this.GetComponent<Controlleur>();
}
void FixedUpdate()
{
this.AlignBoard();
this.HoverEffect();
this.GravityDown();
this.Jump();
}
// Rotates the board so its up vector matches the ground normal beneath it,
// or levels the roll (z) axis when no ground is detected below.
void AlignBoard()
{
RaycastHit hit;
if (Physics.Raycast(transform.position, -transform.up, out hit) && hit.collider.CompareTag(nameRoad))
{
Quaternion rot = Quaternion.FromToRotation(transform.up, hit.normal) * this.rigidbody.rotation;
//if (Quaternion.Angle(this.rigidbody.rotation, rot) < anglePente)
this.rigidbody.MoveRotation(Quaternion.Lerp(this.rigidbody.rotation, rot, lerpAngle));
}
else
{
Quaternion rot = this.rigidbody.rotation;
Vector3 tmp = rot.eulerAngles;
tmp.z = 0;
rot.eulerAngles = tmp;
this.transform.rotation = Quaternion.Slerp(this.transform.rotation, rot, lerpAngle);
}
}
// Keeps the board at hover height: apply an upward force at low speed,
// otherwise snap directly to the target height. Also maintains the
// grounded flag and resets the animation state to idle.
void HoverEffect()
{
RaycastHit hit;
float diffDistance;
if (Physics.Raycast(transform.position, -transform.up, out hit, hoverboardHeight))
{
if (control != null && control.currentSpeed < 10f)
{
this.rigidbody.AddForce(this.transform.up * floatSpeed, ForceMode.Force);
}
else
{
diffDistance = hoverboardHeight - hit.distance;
this.transform.Translate(Vector3.up * diffDistance);
}
this.gameObject.GetComponent<Controlleur>().state = EState.e_iddle;
this.onGround = true;
}
else
this.onGround = false;
}
// Applies the jump impulse when the Jump button is pressed while grounded.
void Jump()
{
if (Input.GetButtonDown("Jump") == true && this.onGround)
{
this.rigidbody.AddForce(this.transform.up * forceJump, ForceMode.VelocityChange);
this.onGround = false;
this.gameObject.GetComponent<Controlleur>().state = EState.e_Jump;
}
}
// Pulls the board back down toward the road while airborne.
void GravityDown()
{
RaycastHit hit;
if (this.onGround == false)
{
if (Physics.Raycast(transform.position, -transform.up, out hit) && hit.collider.CompareTag(nameRoad))
this.rigidbody.AddForce(-transform.up * forceDown, ForceMode.Acceleration);
}
}
}
<file_sep>/src/Network/Lan_broadcast.cs
using UnityEngine;
using System.Net;
using System.Net.Sockets;
using System.Collections.Generic;
/// <summary>
/// LAN game discovery over UDP broadcast: clients send "[Search]0" probes;
/// hosting peers answer with "[Search]&lt;ip&gt; &lt;name&gt; &lt;players&gt; / &lt;max&gt;".
/// Receiving is asynchronous (BeginReceive/EndReceive re-arming itself).
/// </summary>
public class Lan_broadcast : MonoBehaviour
{
private enum e_state { ACTIVE, INACTIVE };
// UDP port to bind; incremented until a free port is found.
public int __udp_port = 22124;
private UdpClient __objUDPClient;
private Queue<string> ListRecvMessages;
private List<string> ListMessages;
// Discovered servers, one "<ip> <name> <count> / <max>" entry each.
private List<string> ListServer;
private e_state __current_state = e_state.INACTIVE;
// Seconds since the last refresh while broadcasting.
private float __timer = 0.0f;
// Local IP advertised in host replies.
private string addr;
private RandomMatchmaker myNetwork;
private bool err = true;
void Start()
{
// Keep trying successive ports until the UdpClient binds successfully.
while (this.err)
{
try
{
this.__objUDPClient = new UdpClient(this.__udp_port);
this.ListRecvMessages = new Queue<string>();
this.ListMessages = new List<string>();
this.ListServer = new List<string>();
this.myNetwork = GameObject.Find("Network").GetComponent<RandomMatchmaker>() as RandomMatchmaker;
this.err = false;
}
catch (System.Exception e)
{
this.err = true;
this.__udp_port++;
Debug.LogError(e.Message);
}
}
this.addr = Network.player.ipAddress;
}
void Update()
{
// While broadcasting, refresh the server list every 60 seconds.
if (this.__current_state == e_state.ACTIVE)
{
this.__timer += Time.deltaTime;
if (this.__timer > 60.0f)
{
this.__timer = 0.0f;
this.Refresh();
}
}
}
/// <summary>Broadcasts a server-discovery probe.</summary>
public void SearchServer()
{
// A payload of "[Search]0" means "I am looking for a server".
this.SendUdpMessage("[Search]0");
}
/// <summary>Broadcasts an ASCII message on the configured UDP port.</summary>
public void SendUdpMessage(string str)
{
try
{
byte[] objByteMessageToSend = System.Text.Encoding.ASCII.GetBytes(str);
IPEndPoint obj = new IPEndPoint(IPAddress.Broadcast, this.__udp_port);
this.__objUDPClient.Send(objByteMessageToSend, objByteMessageToSend.Length, obj);
}
catch (System.Exception e)
{
Debug.LogError(e.ToString());
}
}
/// <summary>Stops broadcasting and clears all collected messages/servers.</summary>
public void StopBroadcast()
{
this.__timer = 0.0f;
this.__objUDPClient.EnableBroadcast = false;
this.__current_state = e_state.INACTIVE;
this.ListRecvMessages.Clear();
this.ListServer.Clear();
this.ListMessages.Clear();
}
/// <summary>Starts broadcasting and arms the asynchronous receive loop.</summary>
public void StartBroadcast()
{
this.ListRecvMessages.Clear();
this.ListServer.Clear();
this.ListMessages.Clear();
this.__objUDPClient.EnableBroadcast = true;
this.__current_state = e_state.ACTIVE;
this.BeginSearch();
}
/// <summary>Sends a probe and returns the (asynchronously filled) server list.</summary>
public List<string> StartSearch()
{
this.SearchServer();
return this.ListServer;
}
public int WaitingMessage()
{
return this.ListRecvMessages.Count;
}
// Arms the next asynchronous receive.
private void BeginSearch()
{
//this.__timer += 1;
this.__objUDPClient.BeginReceive(new System.AsyncCallback(this.EndSearch), null);
}
// Async receive callback: dispatches probes (host answers) and replies
// (searching client records the server), then re-arms while active.
private void EndSearch(System.IAsyncResult objResult)
{
IPEndPoint objSendersIPEndPoint = new IPEndPoint(IPAddress.Any, 0);
byte[] objByteMessage = this.__objUDPClient.EndReceive(objResult, ref objSendersIPEndPoint);
if (objByteMessage.Length > 0)
{
string message = System.Text.Encoding.ASCII.GetString(objByteMessage);
if (!message.Contains("[Search]"))
this.ListMessages.Add(message);
else
{
if (message.Contains("[Search]0"))
{
// Probe received: reply with our game info if we are hosting.
if (this.myNetwork.eStatus == RandomMatchmaker.NetStatus.HOSTING)
{
string infos = "[Search]" + this.addr + " " + this.myNetwork.hostName + " " + RandomMatchmaker.playersList.Count + " / " + this.myNetwork.maxPlayerPerGame;
this.SendUdpMessage(infos);
}
}
else
{
// Host reply received: record the server if we are searching.
if (this.myNetwork.eStatus == RandomMatchmaker.NetStatus.SEARCHING)
{
string convertMessage = message.Remove(0, 8);
Debug.Log("Add serv " + convertMessage);
if (!this.ListServer.Contains(convertMessage))
this.ListServer.Add(convertMessage);
}
}
}
}
if (this.__current_state == e_state.ACTIVE)
{
this.BeginSearch();
}
}
/// <summary>Clears collected data and sends a fresh discovery probe.</summary>
public void Refresh()
{
this.__timer = 0.0f;
this.Clear();
this.SearchServer();
}
public void Clear()
{
this.ListMessages.Clear();
this.ListRecvMessages.Clear();
this.ListServer.Clear();
}
}
}<file_sep>/src/Network/RandomMatchmaker.cs
using UnityEngine;
using System.Collections.Generic;
public class RandomMatchmaker : MonoBehaviour
{
public enum NetStatus
{
IDLE = 0,
SEARCHING,
HOSTING,
INGAME
};
private enum Page
{
NONE = 0,
ACCUEIL,
CHANGECHARACTERSOLO,
ROOM,
CHANGECHARACTER,
LOBBY
};
public Texture[] aTexture = new Texture[4];
public static List<Player> playersList;
public List<string> gamesList;
public Lan_broadcast broadcaster;
public NetStatus eStatus = NetStatus.IDLE;
public string hostName;
public int maxPlayerPerGame = 4;
public static bool isInLobby;
public static bool isNeedSetOtherPlayers = true;
public static GameObject accueil;
public static GameObject join;
public static GameObject create;
public static GameObject lobby;
public static GameObject room;
public static GameObject changeCharacter;
public static GameObject changeCharacterSolo;
public UILabel[] gamesLabel;
public UILabel[] playersLabel;
public UISprite[] playersPanel;
public UILabel[] playersState;
public UISprite[] gamesButton;
public UILabel ipLabel;
public UISprite buttonLaunch;
public static LoadingScreen loadingSolo;
public static LoadingScreen loadingMulti;
public int inLoading = -1;
private bool isInCharacterSelection = false;
private float pictureWidth = 60.0f;
private float spaceBetweenPicture = 15.0f;
private string ip = "";
private string[] aCharacModels;
private Page oldPage;
private Page newPage;
private DJLoup themeSound;
// Use this for initialization
void Start()
{
RandomMatchmaker.accueil = GameObject.Find("Accueil");
RandomMatchmaker.join = GameObject.Find("Join");
RandomMatchmaker.create = GameObject.Find("Create");
RandomMatchmaker.lobby = GameObject.Find("Lobby");
RandomMatchmaker.room = GameObject.Find("Room");
RandomMatchmaker.changeCharacter = GameObject.Find("ChangeCharacter");
RandomMatchmaker.changeCharacterSolo = GameObject.Find("ChangeCharacterSolo");
RandomMatchmaker.loadingSolo = GameObject.Find("LoadingSolo").GetComponent<LoadingScreen>();
RandomMatchmaker.loadingMulti = GameObject.Find("Loading").GetComponent<LoadingScreen>();
RandomMatchmaker.accueil.SetActive(false);
RandomMatchmaker.join.SetActive(false);
RandomMatchmaker.create.SetActive(false);
RandomMatchmaker.lobby.SetActive(false);
RandomMatchmaker.room.SetActive(false);
RandomMatchmaker.loadingSolo.gameObject.SetActive(false);
RandomMatchmaker.loadingMulti.gameObject.SetActive(false);
RandomMatchmaker.changeCharacterSolo.SetActive(false);
int count = this.gamesLabel.Length;
for (int i = 0; i < count; i++)
{
this.gamesButton[i].gameObject.SetActive(false);
}
playersList = new List<Player>();
gamesList = new List<string>();
isInLobby = false;
this.hostName = "toto";
this.aCharacModels = new string[4];
this.aCharacModels[0] = "Scruffy";
this.aCharacModels[1] = "Squad";
this.newPage = Page.ACCUEIL;
themeSound = GameObject.Find("DJLoup").GetComponent<DJLoup>();
themeSound.launchMenuThemeSong();
}
void OnGUI()
{
if (GameLogic.isLaunch == false)
{
GUILayout.Label(PhotonNetwork.connectionStateDetailed.ToString());
}
}
void Update()
{
/*if (GameLogic.isLaunch)
{
if (Input.GetKeyDown(KeyCode.R))
{
GameLogic.SendRestart();
GameObject[] players = GameObject.FindGameObjectsWithTag("Player");
for (int i = 0; i < GameLogic.nbPlayers; ++i)
{
GameLogic.SetPlayer(players[i], i);
}
}
}*/
if (GameLogic.isLaunch == true)
{
GameObject[] players = GameObject.FindGameObjectsWithTag("Player");
if (isNeedSetOtherPlayers == true && players.Length == GameLogic.nbPlayers)
{
this.broadcaster.StopBroadcast();
for (int i = 0; i < GameLogic.nbPlayers; i++)
{
GameLogic.SetPlayer(players[i], i);
}
isNeedSetOtherPlayers = false;
GameLogic.StartCountdown();
}
if (this.eStatus != NetStatus.INGAME)
{
this.eStatus = NetStatus.INGAME;
}
RandomMatchmaker.room.SetActive(false);
}
if (isInLobby == true && PhotonNetwork.connectionStateDetailed != PeerState.Joined)
{
RandomMatchmaker.lobby.SetActive(true);
if (this.oldPage != this.newPage)
{
RandomMatchmaker.lobby.GetComponent<SelectItem>().Select();
this.oldPage = this.newPage;
}
this.DrawInfoLobby();
RandomMatchmaker.accueil.SetActive(false);
RandomMatchmaker.join.SetActive(false);
RandomMatchmaker.create.SetActive(false);
RandomMatchmaker.room.SetActive(false);
}
else if (PhotonNetwork.connectionStateDetailed == PeerState.PeerCreated)
{
if (this.isInCharacterSelection == false)
{
RandomMatchmaker.accueil.SetActive(true);
if (this.oldPage != this.newPage)
{
RandomMatchmaker.accueil.GetComponent<SelectItem>().Select();
this.oldPage = this.newPage;
}
RandomMatchmaker.lobby.SetActive(false);
RandomMatchmaker.room.SetActive(false);
RandomMatchmaker.changeCharacterSolo.SetActive(false);
for (int i = 0; i < 5; i++)
{
this.gamesButton[i].gameObject.SetActive(false);
}
}
else
{
RandomMatchmaker.changeCharacterSolo.SetActive(true);
if (this.oldPage != this.newPage)
{
RandomMatchmaker.changeCharacterSolo.GetComponent<SelectItem>().Select();
this.oldPage = this.newPage;
}
RandomMatchmaker.accueil.SetActive(false);
RandomMatchmaker.join.SetActive(false);
RandomMatchmaker.create.SetActive(false);
}
}
else if (PhotonNetwork.connectionStateDetailed == PeerState.Joined && GameLogic.isLaunch == false)
{
if (isInLobby == true && PhotonNetwork.player.ID == GameLogic.playerWhoIsIt)
{
RandomMatchmaker.playersList.Clear();
PhotonNetwork.Disconnect();
}
/*else if (this.isInCharacterSelection)
{
RandomMatchmaker.changeCharacter.SetActive(true);
if (this.oldPage != this.newPage)
{
RandomMatchmaker.changeCharacter.GetComponent<SelectItem>().Select();
this.oldPage = this.newPage;
}
RandomMatchmaker.room.SetActive(false);
}*/
else
{
isInLobby = false;
RandomMatchmaker.room.SetActive(true);
if (this.oldPage != this.newPage)
{
RandomMatchmaker.room.GetComponent<SelectItem>().Select();
this.oldPage = this.newPage;
}
this.DrawInfoRoom(RandomMatchmaker.playersList);
}
RandomMatchmaker.accueil.SetActive(false);
RandomMatchmaker.join.SetActive(false);
RandomMatchmaker.create.SetActive(false);
RandomMatchmaker.lobby.SetActive(false);
for (int i = 0; i < 5; i++)
{
this.gamesButton[i].gameObject.SetActive(false);
}
}
if (this.inLoading == 0)
{
if (RandomMatchmaker.loadingSolo.IsLoaded() == true)
{
Destroy(RandomMatchmaker.changeCharacterSolo);
this.LaunchGameSolo();
}
else
{
RandomMatchmaker.changeCharacterSolo.SetActive(false);
RandomMatchmaker.loadingSolo.gameObject.SetActive(true);
}
}
else if (this.inLoading == 1)
{
if (RandomMatchmaker.loadingMulti.IsLoaded() == true)
{
this.LaunchGame();
}
else
{
RandomMatchmaker.room.SetActive(false);
RandomMatchmaker.loadingMulti.gameObject.SetActive(true);
}
}
if (RandomMatchmaker.loadingMulti.gameObject.activeInHierarchy == true)
{
RandomMatchmaker.room.SetActive(false);
}
}
public void ClickOnQuit()
{
#if UNITY_EDITOR
UnityEditor.EditorApplication.isPlaying = false;
#else
Application.Quit();
#endif
}
public void ClickOnMulti()
{
RandomMatchmaker.join.SetActive(true);
RandomMatchmaker.create.SetActive(true);
}
public void ClickOnJoin()
{
isInLobby = true;
this.eStatus = NetStatus.SEARCHING;
this.broadcaster.StartBroadcast();
this.gamesList = this.broadcaster.StartSearch();
this.newPage = Page.LOBBY;
}
public void ClickOnCreate()
{
RandomMatchmaker.room.GetComponent<ChangeCharacter>().UpdateCharacterSelection(-1);
PhotonNetwork.Connect(Network.player.ipAddress, 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.broadcaster.StartBroadcast();
this.eStatus = NetStatus.HOSTING;
this.newPage = Page.ROOM;
}
public void ClickOnSolo()
{
RandomMatchmaker.changeCharacterSolo.GetComponent<ChangeCharacter>().UpdateCharacterSelection(-1);
this.isInCharacterSelection = true;
this.newPage = Page.CHANGECHARACTERSOLO;
}
public void ClickOnBackSolo()
{
this.isInCharacterSelection = false;
this.newPage = Page.ACCUEIL;
}
private void LaunchGameSolo()
{
this.inLoading = -1;
RandomMatchmaker.loadingSolo.ResetTimer();
DontDestroyOnLoad(GameObject.Find("ApplicationHandler"));
Application.LoadLevel(1);
}
public void ClickOnLaunchSolo()
{
this.inLoading = 0;
}
public void ClickOnFirstSolo()
{
GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model = "Scruffy";
RandomMatchmaker.changeCharacterSolo.GetComponent<ChangeCharacter>().UpdateCharacterSelection(0);
}
public void ClickOnSecondSolo()
{
GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model = "Squad";
RandomMatchmaker.changeCharacterSolo.GetComponent<ChangeCharacter>().UpdateCharacterSelection(1);
}
public void ClickOnThirdSolo()
{
Debug.Log("Character incoming");
//GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model = "Squad";
}
public void ClickOnFourthSolo()
{
Debug.Log("Character incoming");
//GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model = "Squad";
}
void OnPhotonRandomJoinFailed()
{
PhotonNetwork.CreateRoom(null);
}
void OnJoinedLobby()
{
PhotonNetwork.JoinRandomRoom();
}
public void ClickOnBack()
{
this.ipLabel.text = "";
isInLobby = false;
this.broadcaster.StopBroadcast();
this.newPage = Page.ACCUEIL;
}
public void ClickOnJoinIP()
{
RandomMatchmaker.room.GetComponent<ChangeCharacter>().UpdateCharacterSelection(-1);
PhotonNetwork.Connect(this.ip, 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
public void ClickOnRefresh()
{
this.gameObject.GetComponent<Lan_broadcast>().Refresh();
}
public void ClickOnFirstGame()
{
string[] str = this.gamesList[0].Split(' ');
PhotonNetwork.Connect(str[0], 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
public void ClickOnSecondGame()
{
string[] str = this.gamesList[1].Split(' ');
PhotonNetwork.Connect(str[0], 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
public void ClickOnThirdGame()
{
string[] str = this.gamesList[2].Split(' ');
PhotonNetwork.Connect(str[0], 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
public void ClickOnFourthGame()
{
string[] str = this.gamesList[3].Split(' ');
PhotonNetwork.Connect(str[0], 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
public void ClickOnFifthGame()
{
string[] str = this.gamesList[4].Split(' ');
PhotonNetwork.Connect(str[0], 5055, "f6065adc-6780-4bde-a618-78979b16c3c8", "0.1");
this.newPage = Page.ROOM;
}
void DrawInfoLobby()
{
string[] str;
for (int i = 0; i < gamesList.Count; ++i)
{
str = this.gamesList[i].Split(' ');
if (i < 5)
{
this.gamesLabel[i].text = str[1] + " " + str[2] + " " + str[3] + " " + str[4];
this.gamesButton[i].gameObject.SetActive(true);
}
}
this.ip = this.ipLabel.text;
}
public void BackToMenu()
{
this.broadcaster.StopBroadcast();
RandomMatchmaker.playersList.Clear();
RandomMatchmaker.isNeedSetOtherPlayers = true;
GameLogic.isLaunch = false;
GameLogic.isInPreRace = false;
GameLogic.isRaceStart = false;
GameLogic.countdown = 3.0f;
this.eStatus = NetStatus.IDLE;
PhotonNetwork.Disconnect();
this.newPage = Page.ACCUEIL;
}
public void ClickOnBackRoom()
{
this.broadcaster.StopBroadcast();
RandomMatchmaker.playersList.Clear();
PhotonNetwork.Disconnect();
this.newPage = Page.ACCUEIL;
}
public void ClickOnReady()
{
int myID = GameLogic.GetPlayerByID(PhotonNetwork.player.ID).ID;
RandomMatchmaker.playersList[myID - 1].isReady = !RandomMatchmaker.playersList[myID - 1].isReady;
GameLogic.SendChangeState(myID - 1, RandomMatchmaker.playersList[myID - 1].isReady);
}
public void ClickOnChangeCharacter()
{
this.isInCharacterSelection = true;
this.newPage = Page.CHANGECHARACTER;
}
private void LaunchGame()
{
this.inLoading = -1;
GameLogic.SendLaunchGame(RandomMatchmaker.playersList);
}
public void ClickOnLaunch()
{
this.inLoading = 1;
GameLogic.SendLoadingScreen();
}
public void DrawInfoRoom(List<Player> playerList)
{
int playerReady = 0;
int i;
this.buttonLaunch.gameObject.SetActive(false);
for (i = 0; i < playerList.Count; ++i)
{
string str = "Player " + (i + 1);
string state;
this.playersPanel[i].gameObject.SetActive(true);
if (playerList[i].isReady == false)
{
state = "";
if (playerList[i].characModel == "Scruffy")
{
this.playersPanel[i].spriteName = "ROOM_MULTI_PLAYER_NOTREADY_SCRUFFY";
}
else
{
this.playersPanel[i].spriteName = "ROOM_MULTI_PLAYER_NOTREADY_V12";
}
}
else
{
state = "READY !";
if (playerList[i].characModel == "Scruffy")
{
this.playersPanel[i].spriteName = "ROOM_MULTI_PLAYER_READY_SCRUFFY";
}
else
{
this.playersPanel[i].spriteName = "ROOM_MULTI_PLAYER_READY_V12";
}
playerReady += 1;
}
this.playersLabel[i].text = str;
this.playersState[i].text = state;
}
while (i < 4)
{
this.playersLabel[i].text = "";
this.playersState[i].text = "";
this.playersPanel[i].gameObject.SetActive(false);
i++;
}
if (playerReady == playerList.Count && playerList.Count > 0 && PhotonNetwork.player.ID == GameLogic.playerWhoIsIt)
{
this.buttonLaunch.gameObject.SetActive(true);
}
}
public void ClickOnBackCharacter()
{
this.isInCharacterSelection = false;
this.newPage = Page.ROOM;
}
public void ClickOnFirst()
{
int myID = GameLogic.GetPlayerByID(PhotonNetwork.player.ID).ID;
GameLogic.GetPlayerByID(PhotonNetwork.player.ID).characModel = this.aCharacModels[0];
GameLogic.SendChangePerso(myID - 1, this.aCharacModels[0]);
RandomMatchmaker.room.GetComponent<ChangeCharacter>().UpdateCharacterSelection(0);
this.isInCharacterSelection = false;
this.newPage = Page.ROOM;
}
public void ClickOnSecond()
{
int myID = GameLogic.GetPlayerByID(PhotonNetwork.player.ID).ID;
GameLogic.GetPlayerByID(PhotonNetwork.player.ID).characModel = this.aCharacModels[1];
GameLogic.SendChangePerso(myID - 1, this.aCharacModels[1]);
RandomMatchmaker.room.GetComponent<ChangeCharacter>().UpdateCharacterSelection(1);
this.isInCharacterSelection = false;
this.newPage = Page.ROOM;
}
public void ClickOnThird()
{
Debug.Log("Character incoming");
/*int myID = GameLogic.GetPlayerByID(PhotonNetwork.player.ID).ID;
GameLogic.GetPlayerByID(PhotonNetwork.player.ID).characModel = this.aCharacModels[2];
GameLogic.SendChangePerso(myID - 1, this.aCharacModels[0]);
this.isInCharacterSelection = false;*/
}
public void ClickOnFourth()
{
    // Placeholder: the fourth character is not available yet.
    Debug.Log("Character incoming");
    // NOTE(review): before enabling the code below, fix the SendChangePerso call —
    // it passes aCharacModels[0] where aCharacModels[3] is intended, and it never
    // calls UpdateCharacterSelection / sets newPage like ClickOnFirst/Second do.
    /*int myID = GameLogic.GetPlayerByID(PhotonNetwork.player.ID).ID;
    GameLogic.GetPlayerByID(PhotonNetwork.player.ID).characModel = this.aCharacModels[3];
    GameLogic.SendChangePerso(myID - 1, this.aCharacModels[0]);
    this.isInCharacterSelection = false;*/
}
public void ClickOnBackResults()
{
    // Return to the room page and clear the page history.
    this.newPage = Page.ROOM;
    this.oldPage = Page.NONE;
}
}
<file_sep>/src/GUI/SliderPositionFromPlayer.cs
using UnityEngine;
using System.Collections;
// Drives a UI slider that mirrors one player's progress along the
// "plateformRail" iTween path. Bound to its player via Init(), which is
// called by SliderPositionFromPlatform.
public class SliderPositionFromPlayer : MonoBehaviour
{
    public UISlider slider;
    private SliderPositionFromPlatform handler;
    private Controlleur player;
    private bool init = false;
    private float distanceParcourue;

    private void Start()
    {
        // Without a slider to drive there is nothing this script can do.
        if (this.slider == null)
        {
            Debug.LogWarning(this.GetType() + ": no target, disabling script");
            this.enabled = false;
            return;
        }
        this.slider.value = 0f;
    }

    // Binds this slider to a player and arms the per-frame update.
    public void Init(SliderPositionFromPlatform handler, Controlleur player)
    {
        this.handler = handler;
        this.player = player;
        this.slider.value = 0.001f;
        this.init = true;
    }

    private void Update()
    {
        if (!this.init || this.player == null)
            return;
        // Project the player's world position onto the rail to obtain its
        // covered distance, then mirror that on the slider.
        Vector3 nearestPoint = Vector3.zero;
        iTween.ClosestPointOnPath(iTweenPath.GetPath("plateformRail"), this.player.transform.position, 0.01f, out nearestPoint, out this.distanceParcourue);
        this.slider.value = this.distanceParcourue;
    }
}
<file_sep>/src/GUI/BoostDisplay.cs
using UnityEngine;
using System.Collections;
// HUD gauge for the player's boost ("jauge") value: one sprite per 10 points,
// filled left to right, with a partial fill on the current segment.
public class BoostDisplay : MonoBehaviour
{
    public UISprite[] sprites;
    public float currentValue;   // mirrored from Jauge.m_jauge every frame
    private Jauge localPlayer;
    private bool init = false;

    private void Start()
    {
        this.Init();
    }

    // Locates the local player's Jauge component: through GameLogic when the
    // "Network" object exists, otherwise through the scene object "Player".
    public void Init()
    {
        GameObject network = GameObject.Find("Network");
        if (network)
        {
            GameLogic gameLogic = network.GetComponent<GameLogic>();
            if (gameLogic)
            {
                GameObject avatar = gameLogic.Player;
                if (avatar == null)
                    this.enabled = false;
                else
                    this.localPlayer = avatar.GetComponent<Jauge>();
            }
        }
        else
        {
            GameObject avatar = GameObject.Find("Player");
            if (avatar == null)
                this.enabled = false;
            else
                this.localPlayer = avatar.GetComponent<Jauge>();
        }
        this.init = true;
    }

    private void Update()
    {
        if (!this.init)
            return;
        this.currentValue = this.localPlayer.m_jauge;
        this.UpdateCoeff();
    }

    // Refreshes the fill amounts: `full` whole segments, one partial segment,
    // and empty segments for the remainder.
    private void UpdateCoeff()
    {
        int full = Mathf.FloorToInt(this.currentValue / 10);
        if (this.currentValue <= 0)
        {
            for (int i = 0; i < this.sprites.Length; ++i)
                this.sprites[i].fillAmount = 0;
            return;
        }
        // Clamp so a maxed gauge fills the last sprite instead of overflowing.
        if (full >= this.sprites.Length)
            full = this.sprites.Length - 1;
        for (int i = 0; i < full; ++i)
            this.sprites[i].fillAmount = 1;
        this.sprites[full].fillAmount = this.currentValue / 10 - full;
        for (int i = full + 1; i < this.sprites.Length; ++i)
            this.sprites[i].fillAmount = 0;
    }
}
<file_sep>/src/GUI/MoveArrow.cs
using UnityEngine;
using System.Collections;
// Shows a selection arrow anchored to this widget while it is hovered or
// selected, optionally hiding it again when the widget is clicked.
public class MoveArrow : MonoBehaviour
{
    public UISprite arrow;     // shared arrow sprite re-anchored to whichever widget is active
    public int offsetX;        // horizontal offset used in horizontal layouts
    public int offsetY;        // vertical offset of the arrow above the widget
    public bool vertical;      // true: center the arrow horizontally on the widget
    public bool hideOnClick;   // true: clicking the widget hides the arrow

    // Anchors the arrow to this widget and toggles its visibility.
    // OnHover and OnSelect previously duplicated this body verbatim;
    // the logic now lives in one place.
    private void SetArrowVisible(bool visible)
    {
        if (visible)
        {
            this.arrow.topAnchor.target = this.transform;
            this.arrow.bottomAnchor.target = this.transform;
            this.arrow.rightAnchor.target = this.transform;
            this.arrow.leftAnchor.target = this.transform;
            if (this.vertical == true)
            {
                // Center the arrow on the widget (half widths via >> 1).
                this.arrow.leftAnchor.absolute = (this.GetComponent<UISprite>().width >> 1) - (this.arrow.width >> 1);
            }
            else
            {
                this.arrow.leftAnchor.absolute = this.offsetX;
            }
            this.arrow.rightAnchor.absolute = this.arrow.leftAnchor.absolute + this.arrow.width;
            this.arrow.topAnchor.absolute = (this.arrow.height >> 1) + this.offsetY;
            this.arrow.bottomAnchor.absolute = this.arrow.topAnchor.absolute - this.arrow.height;
            this.arrow.gameObject.SetActive(true);
        }
        else
        {
            this.arrow.gameObject.SetActive(false);
        }
    }

    void OnHover(bool isOver)
    {
        this.SetArrowVisible(isOver);
    }

    void OnSelect(bool selected)
    {
        this.SetArrowVisible(selected);
    }

    void OnClick()
    {
        if (this.hideOnClick)
            this.arrow.gameObject.SetActive(false);
    }
}
<file_sep>/src/Plateforme/movePlateform.cs
using UnityEngine;
using System.Collections;
// Moves the drone platform along the "plateformRail" spline at a constant
// speed once the race has been launched.
public class movePlateform : MonoBehaviour
{
    public float speedPlateforme = 20f;   // travel speed along the rail
    [HideInInspector]
    public float distanceParcouru;        // distance covered along the rail so far
    public bool isLaunch = false;         // set true when the race starts
    private Spline.Path mPath = new Spline.Path();

    void Start()
    {
        // Snap to the first waypoint of the rail and cache the path.
        distanceParcouru = 0.0f;
        transform.position = iTweenPath.GetPath("plateformRail")[0];
        mPath = iTweenPath.GetPath("plateformRail");
    }

    void Update()
    {
        if (this.isLaunch == false)
            return;
        // Advance along the spline; MoveOnPath updates both the covered
        // distance and the orientation to face along the path.
        Quaternion orientation = new Quaternion();
        transform.position = Spline.MoveOnPath(mPath, transform.position, ref distanceParcouru, ref orientation, speedPlateforme);
        transform.rotation = orientation;
    }
}
<file_sep>/src/Network/ApplicationHandler.cs
using UnityEngine;
using System.Collections;
// Application-level configuration flags read at startup.
public class ApplicationHandler : MonoBehaviour
{
    // Default character model name; "Scruffy" matches the resources used elsewhere.
    public string model = "Scruffy";
    // Presumably toggles networked (Photon) play vs. local play — confirm against readers.
    public bool network = true;
}
<file_sep>/src/Network/Player.cs
using UnityEngine;
using System.Collections;
// Lobby/room record for one connected player. Plain data class — NOT a
// MonoBehaviour, so the Unity-style Start()/Update() the original carried were
// dead code that Unity never invoked; isReady is now set in the constructor.
public class Player
{
    public bool isReady;                     // ready flag shown in the room screen
    public int ID;                           // slot index (1-based spawn-point index)
    public int PhotonID;                     // Photon network player id
    public string characModel = "Scruffy";   // selected character model name

    public Player(int PhotonID, int ID)
    {
        this.PhotonID = PhotonID;
        this.ID = ID;
        this.isReady = false;
    }
}
<file_sep>/src/Plateforme/PlateformData.cs
using UnityEngine;
using System.Collections;
// Per-platform scratch data: which player (if any) is currently bound to it.
public class PlateformData : MonoBehaviour {
    #region Public field
    // Player currently attached to this platform; assigned by other scripts,
    // cleared at scene start.
    public GameObject m_Player;
    #endregion
    #region Init
    void Start () {
        m_Player = null;
    }
    #endregion
}
<file_sep>/src/Declaration.cs
using UnityEngine;
using System.Collections;
// Character movement states, serialized over the network and used to pick
// animations (see AnimationController.PlayAnim and NetworkCharacter).
public enum EState
{
    e_iddle = 0,   // idle / default (misspelling kept — public name used by callers)
    e_LTurn,       // turning left
    e_RTurn,       // turning right
    e_Jump,        // jumping
    e_Grap         // presumably grappling to the platform — confirm against Filin usage
}
using UnityEngine;
using System.Collections;
// Destructible prop: either plays a scripted destruction animation
// (PlayAnim + cleanup in Update), or spawns an FX prefab when destroyed
// abruptly — unless the app is quitting or a level is loading.
public class Destructible : MonoBehaviour
{
    public GameObject mech_FX;       // explosion FX spawned on non-animated destruction
    public Animation anim;           // scripted destruction animation
    public bool destroyed = false;   // public flag; not read inside this class
    private bool animLaunched = false;
    private bool isQuitting = false;

    void Update()
    {
        // After the destruction animation has started, remove the object as
        // soon as the clip finishes playing.
        if (this.animLaunched && !this.anim.isPlaying)
        {
            Destroy(this.gameObject);
        }
    }

    void OnApplicationQuit()
    {
        // Remember we are shutting down so OnDestroy skips the FX.
        this.isQuitting = true;
    }

    void OnDestroy()
    {
        // Spawn the FX only for a "real" in-game destruction: not animated,
        // not an application quit, not a scene load.
        if (!this.animLaunched && !this.isQuitting && Application.isLoadingLevel == false)
        {
            Instantiate(this.mech_FX, this.transform.position, Quaternion.identity);
        }
    }

    // Starts the scripted destruction; Update() destroys the object afterwards.
    public void PlayAnim()
    {
        this.animLaunched = true;
        this.anim.Play();
    }
}
<file_sep>/src/Gameplay/BoostZone.cs
using UnityEngine;
using System.Collections;
// Grants extra turbo speed to the attached Turbo component when this object's
// trigger is entered by a collider tagged "ZoneBoost".
public class BoostZone : MonoBehaviour
{
    public int boostValue = 20;   // turbo speed added per zone crossing

    #region members
    private Turbo _turbo;         // sibling Turbo component, cached once
    #endregion

    void Start()
    {
        _turbo = GetComponent<Turbo>();
    }

    void OnTriggerEnter(Collider col)
    {
        // CompareTag instead of `col.tag == ...`: no string allocation and the
        // documented Unity idiom for tag checks.
        if (!col.CompareTag("ZoneBoost"))
            return;
        if (_turbo == null)
            return;
        _turbo.m_turboSpeed += boostValue;
    }
}
<file_sep>/src/GUI/ScoreDisplay.cs
using UnityEngine;
using System.Collections;
// HUD readout for the local player's score: a segmented multiplier gauge,
// the numeric score, and the current "xN" multiplier label.
public class ScoreDisplay : MonoBehaviour
{
    // One sprite per multiplier step, filled left to right.
    public UISprite[] sprites;
    public UILabel scoreValue;   // numeric score readout
    public UILabel scoreMulti;   // "xN" multiplier readout
    public float currentValue;   // multiplier coefficient, mirrored from Filin.m_coeff
    private ScoreMultiplicateur scoreManager;
    private Filin localPlayer;
    private bool init = false;

    private void Awake()
    {
        this.currentValue = 0;
        this.scoreValue.text = "0";
    }

    private void Start()
    {
        this.Init();
        this.UpdateCoeff();
    }

    // Locates the local player's Filin (via GameLogic in network play,
    // otherwise the scene object "Player") and the platform's score manager.
    // Disables the script if either cannot be found.
    public void Init()
    {
        GameObject network = GameObject.Find("Network");
        if (network)
        {
            GameLogic gameLogic = network.GetComponent<GameLogic>();
            if (gameLogic)
            {
                GameObject localPlayer = gameLogic.Player;
                if (localPlayer == null)
                {
                    Debug.LogWarning(this.GetType() + ": no local player found, disabling script");
                    this.enabled = false;
                }
                else
                    this.localPlayer = localPlayer.GetComponent<Filin>();
            }
        }
        else
        {
            GameObject localPlayer = GameObject.Find("Player");
            if (localPlayer == null)
            {
                Debug.LogWarning(this.GetType() + ": no local player found, disabling script");
                this.enabled = false;
            }
            else
                this.localPlayer = localPlayer.GetComponent<Filin>();
        }
        GameObject platform = GameObject.Find("plateforme");
        if (platform)
            this.scoreManager = platform.GetComponent<ScoreMultiplicateur>();
        else
        {
            Debug.LogWarning(this.GetType() + ": no platform found, disabling script");
            this.enabled = false;
        }
        this.init = true;
    }

    // Refreshes the gauge fill and the "xN" label from currentValue.
    private void UpdateCoeff()
    {
        int full = Mathf.FloorToInt(this.currentValue);
        bool max = false;
        if (this.currentValue > 0)
        {
            // Clamp to the last sprite when the coefficient exceeds the gauge.
            if (full >= this.sprites.Length)
            {
                full = this.sprites.Length - 1;
                max = true;
            }
            for (int i = 0; i < full; ++i)
                this.sprites[i].fillAmount = 1;
            this.sprites[full].fillAmount = this.currentValue - full;
            for (int i = full + 1, size = this.sprites.Length; i < size; ++i)
                this.sprites[i].fillAmount = 0;
            // NOTE(review): assumes m_bonus holds at least sprites.Length + 1
            // entries; otherwise m_bonus[full + 1] is out of range when the
            // gauge is maxed — confirm against the ScoreMultiplicateur setup.
            if (max)
                this.scoreMulti.text = "x" + this.scoreManager.m_bonus[full + 1].y;
            else
                this.scoreMulti.text = "x" + this.scoreManager.m_bonus[full].y;
        }
        else
        {
            for (int i = 0, size = this.sprites.Length; i < size; ++i)
                this.sprites[i].fillAmount = 0;
            this.scoreMulti.text = "x" + this.scoreManager.m_bonus[0].y;
        }
    }

    private void Update()
    {
        if (this.init)
        {
            this.currentValue = this.localPlayer.m_coeff;
            this.UpdateCoeff();
            this.scoreValue.text = "" + Mathf.FloorToInt(this.localPlayer.m_score);
        }
    }
}
<file_sep>/src/Controlleur/AnimationController.cs
using UnityEngine;
using System.Collections;
// Selects and crossfades the character's animation each physics step based on
// hit state, proximity to the drone platform, movement state, and speed.
// The ordering of the early returns in PlayAnim is significant: hit animation
// wins over platform proximity, which wins over movement states, which win
// over speed-based clips, with "Idle" as the fallback.
public class AnimationController : MonoBehaviour
{
    //public Animator animator;
    public Animation anim;              // legacy Animation component holding the clips
    public Transform platform;          // the drone platform to measure distance against
    public Controlleur controller;      // movement controller supplying the current EState
    public Filin filin;                 // grappling component (range + active state)
    public Turbo turbo;                 // turbo state for the "Turbo" clip
    public bool hit = false;            // set externally when the character is hit
    public bool grap = false;           // unused while the Grap block below is commented out
    public PlateformeInfos platformInfos;  // HUD panel showing download on/off icons
    private eFilinState lastState;
    private bool firstPassage = false;  // true once the Hit clip has started playing
    private bool firstGrap = false;

    private void Start()
    {
        if (this.filin != null)
            this.lastState = this.filin._filinState;
        this.controller = this.GetComponent<Controlleur>();
        // Only the locally-owned player drives the HUD platform-info panel.
        if (this.controller.photonView.isMine == true)
        {
            this.platformInfos.InitPlayer(this.controller);
        }
    }

    private void FixedUpdate()
    {
        this.PlayAnim();
    }

    // Picks the animation for this step; also called directly by
    // NetworkCharacter.Update for remote avatars.
    public void PlayAnim()
    {
        if (this.anim == null)
            return;
        // Hit animation: play until the clip finishes, then clear the flag.
        if (this.hit == true)
        {
            if (this.anim.IsPlaying("Hit") == false && this.firstPassage == true)
            {
                this.hit = false;
                this.firstPassage = false;
            }
            else
            {
                this.firstPassage = true;
                this.anim.CrossFade("Hit");
                return;
            }
        }
        /*if (this.grap == true)
        {
            if (this.anim.IsPlaying("Grap") == false && this.firstGrap == true)
            {
                this.firstGrap = false;
                this.grap = false;
            }
            else
            {
                this.anim.CrossFade("Grap");
                this.firstGrap = true;
                return;
            }
        }*/
        EState state = this.controller.state;
        // Near the drone platform: use the "near drone" clip variants and
        // drive the HUD download-state icons from the grapple state.
        if (this.platform != null && this.filin != null)
        {
            if (Vector3.Distance(this.transform.position, this.platform.position) <= this.filin.m_maxDist)
            {
                if (state == EState.e_LTurn)
                {
                    this.anim.CrossFade("Drone_Lturn");
                }
                else if (state == EState.e_RTurn)
                {
                    this.anim.CrossFade("Drone_Rturn");
                }
                else
                {
                    this.anim.CrossFade("NearDrone");
                }
                if (this.platformInfos != null)
                {
                    if (this.filin._filinState == eFilinState._ACTIVE_)
                    {
                        this.platformInfos.downloadOn.SetActive(true);
                        this.platformInfos.downloadOff.SetActive(false);
                    }
                    else
                    {
                        this.platformInfos.downloadOn.SetActive(false);
                        this.platformInfos.downloadOff.SetActive(true);
                    }
                }
                return;
            }
            else
            {
                // Out of range: hide both download icons.
                if (this.platformInfos != null)
                {
                    this.platformInfos.downloadOff.SetActive(false);
                    this.platformInfos.downloadOn.SetActive(false);
                }
            }
        }
        // Movement states take priority over speed-based clips.
        switch (state)
        {
            case EState.e_Jump:
                //this.animation.CrossFadeQueued("Jump", 0.3f, QueueMode.CompleteOthers);
                this.anim.CrossFade("Jump");
                return;
            case EState.e_LTurn:
                //this.animation.CrossFadeQueued("Lturn", 0.3f, QueueMode.CompleteOthers);
                this.anim.CrossFade("Lturn");
                return;
            case EState.e_RTurn:
                //this.animation.CrossFadeQueued("Rturn", 0.3f, QueueMode.CompleteOthers);
                this.anim.CrossFade("Rturn");
                return;
            default:
                break;
        }
        // Speed-based clips only apply to the locally-simulated player —
        // remote avatars do not have an authoritative currentSpeed.
        if (this.controller.photonView.isMine == true || this.controller.noNetwork)
        {
            if (this.turbo.m_turbo == true)
            {
                this.anim.CrossFade("Turbo");
                return;
            }
            if (this.controller.currentSpeed >= this.controller.maxSpeed * 0.8f)
            {
                this.anim.CrossFade("Maxspeed");
                return;
            }
        }
        this.anim.CrossFade("Idle");
        //this.animator.SetBool("Grab", this.filin._filinState != this.lastState && this.filin._filinState == eFilinState._ACTIVE_);
        //this.lastState = this.filin._filinState;
    }

    /*public void PlayAnim()
    {
        if (this.animator == null)
            return;
        EState state = this.controller.state;
        switch (state)
        {
            case EState.e_Jump:
                this.animator.SetBool("Jump", true);
                break;
            case EState.e_LTurn:
                this.animator.SetBool("LTurn", true);
                this.animator.SetBool("RTurn", false);
                break;
            case EState.e_RTurn:
                this.animator.SetBool("RTurn", true);
                this.animator.SetBool("LTurn", false);
                break;
            default:
                this.animator.SetBool("RTurn", false);
                this.animator.SetBool("LTurn", false);
                this.animator.SetBool("Jump", false);
                break;
        }
        if (this.controller.photonView.isMine == true || this.controller.noNetwork)
        {
            this.animator.SetBool("MaxSpeed", this.controller.currentSpeed >= (this.controller.maxSpeed * 0.8f));
        }
        if (this.platform != null && this.filin != null)
            this.animator.SetBool("NearDrone", Vector3.Distance(this.transform.position, this.platform.position) <= this.filin.m_maxDist);
        //this.animator.SetBool("Grab", this.filin._filinState != this.lastState && this.filin._filinState == eFilinState._ACTIVE_);
        //this.lastState = this.filin._filinState;
    }*/
}
<file_sep>/src/GUI/SliderPositionFromPlatform.cs
using UnityEngine;
using System.Collections;
// Drives the platform-progress slider and lazily binds one player slider per
// object tagged "Player"; spare player sliders are hidden.
public class SliderPositionFromPlatform : MonoBehaviour
{
    public UISlider PlatformSlider;                   // progress of the drone platform
    public SliderPositionFromPlayer[] PlayerSliders;  // one entry per potential player
    private movePlateform platform;
    private int nbPlayers;
    private bool initPlayers = false;

    private void Start()
    {
        if (this.PlatformSlider == null)
        {
            Debug.LogWarning(this.GetType() + ": no target, disabling script");
            this.enabled = false;
            return;
        }
        this.Init();
    }

    // Finds the platform and schedules (re-)binding of the player sliders.
    public void Init()
    {
        GameObject platformObject = GameObject.Find("plateforme");
        if (platformObject)
            this.platform = platformObject.GetComponent<movePlateform>();
        else
        {
            Debug.LogWarning(this.GetType() + ": no platform found, disabling script");
            this.enabled = false;
        }
        this.initPlayers = false;
    }

    // Binds every object tagged "Player" to a slider and hides the rest.
    public void InitPlayers()
    {
        GameObject[] players = GameObject.FindGameObjectsWithTag("Player");
        this.nbPlayers = players.Length;
        int index = 0;
        for (; index < this.nbPlayers; ++index)
        {
            this.PlayerSliders[index].gameObject.SetActive(true);
            this.PlayerSliders[index].Init(this, players[index].GetComponent<Controlleur>());
        }
        for (; index < this.PlayerSliders.Length; ++index)
            this.PlayerSliders[index].gameObject.SetActive(false);
        this.initPlayers = true;
    }

    private void Update()
    {
        // Players may spawn after Init(); rebind until it has happened once.
        if (!this.initPlayers)
            this.InitPlayers();
        this.PlatformSlider.value = this.platform.distanceParcouru;
    }
}
<file_sep>/src/Gameplay/RotateCollectibles.cs
using UnityEngine;
using System.Collections;
// Spins a collectible around the world Y axis while it is on screen.
public class RotateCollectibles : MonoBehaviour
{
    public float speed = 40.0f;          // degrees per second
    public CheckVisibility visibility;   // skip rotation while off screen

    // (The empty Start() the original carried was removed: Unity still invokes
    // empty magic methods, which costs per-instance overhead for no effect.)

    void Update()
    {
        // Frame-rate-independent rotation, gated on visibility so hundreds of
        // off-screen collectibles do no work.
        if (this.visibility.visible == true)
        {
            this.gameObject.transform.Rotate(Vector3.up * (this.speed * Time.deltaTime));
        }
    }
}
<file_sep>/src/Network/GameLogic.cs
using UnityEngine;
using System.Collections.Generic;
// Central multiplayer coordinator: tracks the room's player list, runs the
// pre-race countdown on the master client, and exposes static Send* wrappers
// around the scene PhotonView's RPCs. The matching [RPC] receivers live at the
// bottom of the class.
public class GameLogic : Photon.MonoBehaviour
{
    public static int playerWhoIsIt;          // Photon ID of the designated "it"/host player
    public static int nbPlayers;
    public static bool isLaunch;              // true once the local race scene has been set up
    public static bool isRaceStart = false;   // true once the countdown reached zero
    public static bool isInPreRace = false;   // master-client only: countdown in progress
    public static float countdown = 3.0f;
    public static List<GameObject> spawnList;
    public GameObject background;
    private LoadNextLevel loading;
    private GameObject collectibles;
    private static PhotonView ScenePhotonView;
    private List<Player> playerList;
    private GameObject player;
    private AudioClipManager soundManager;
    private FadingText countdownText;

    // Local player's avatar: spawned in LaunchGame, destroyed in EndRace.
    public GameObject Player
    {
        private set
        {
        }
        get
        {
            return player;
        }
    }

    void Start()
    {
        this.soundManager = Camera.main.GetComponent<AudioClipManager>();
        this.loading = GameObject.Find("Trigger_LVL02").GetComponent<LoadNextLevel>();
        ScenePhotonView = this.GetComponent<PhotonView>();
        this.playerList = new List<Player>();
        spawnList = new List<GameObject>();
        spawnList.Add(GameObject.Find("PointSpawn1"));
        spawnList.Add(GameObject.Find("PointSpawn2"));
        spawnList.Add(GameObject.Find("PointSpawn3"));
        spawnList.Add(GameObject.Find("PointSpawn4"));
        isLaunch = false;
    }

    void Update()
    {
        // Master client only (see StartCountdown): tick the countdown, then
        // broadcast the race start once it elapses.
        if (isInPreRace)
        {
            if (this.Countdown())
            {
                SendStart();
                isInPreRace = false;
            }
        }
    }

    void OnJoinedRoom()
    {
        // First player in the room becomes slot 1 and the initial "it".
        if (PhotonNetwork.playerList.Length == 1)
        {
            Player hostPlayer = new Player(PhotonNetwork.player.ID, 1);
            playerWhoIsIt = PhotonNetwork.player.ID;
            RandomMatchmaker.playersList.Add(hostPlayer);
            nbPlayers = 1;
        }
    }

    void OnPhotonPlayerDisconnected(PhotonPlayer player)
    {
        Debug.Log("Player Disconnected " + player.ID);
        Player toDisconnect = GetPlayerByID(player.ID);
        // Fix: the original dereferenced toDisconnect.ID inside the loop BEFORE
        // its null check, so an unknown leaver crashed instead of being logged.
        if (toDisconnect == null)
        {
            Debug.LogError("Player " + player.ID + " not found!");
        }
        else
        {
            // Shift down the slot IDs of everyone after the leaver.
            for (int i = 0; i < RandomMatchmaker.playersList.Count; ++i)
            {
                if (toDisconnect.ID < RandomMatchmaker.playersList[i].ID)
                {
                    --RandomMatchmaker.playersList[i].ID;
                }
            }
        }
        if (PhotonNetwork.isMasterClient)
        {
            if (player.ID == playerWhoIsIt)
            {
                // if the player who left was "it", the "master" is the new "it"
                TagPlayer(PhotonNetwork.player.ID);
            }
        }
        // List.Remove(null) is a harmless no-op when the player was not found.
        RandomMatchmaker.playersList.Remove(toDisconnect);
    }

    void OnPhotonPlayerConnected(PhotonPlayer player)
    {
        // when new players join, we send "who's it" to let them know
        // only one player will do this: the "master"
        if (PhotonNetwork.isMasterClient)
        {
            if (GameLogic.isLaunch)
            {
                // Race already running: bounce the newcomer back to the lobby.
                ScenePhotonView.RPC("GameAlreadyLaunch", player);
            }
            else
            {
                Player newPlayer = new Player(player.ID, RandomMatchmaker.playersList.Count + 1);
                Debug.Log("Player Connected " + player.ID);
                // Flatten the player list into parallel arrays for the RPC.
                bool[] states = new bool[RandomMatchmaker.playersList.Count + 1];
                int[] PhotonID = new int[RandomMatchmaker.playersList.Count + 1];
                int[] ID = new int[RandomMatchmaker.playersList.Count + 1];
                string[] models = new string[RandomMatchmaker.playersList.Count + 1];
                TagPlayer(playerWhoIsIt);
                RandomMatchmaker.playersList.Add(newPlayer);
                for (int i = 0; i < RandomMatchmaker.playersList.Count; ++i)
                {
                    states[i] = RandomMatchmaker.playersList[i].isReady;
                    PhotonID[i] = RandomMatchmaker.playersList[i].PhotonID;
                    ID[i] = RandomMatchmaker.playersList[i].ID;
                    models[i] = RandomMatchmaker.playersList[i].characModel;
                }
                ScenePhotonView.RPC("SetList", PhotonTargets.All, RandomMatchmaker.playersList.Count, states, PhotonID, ID, models);
            }
        }
    }

    // Ticks the pre-race countdown; returns true when it has elapsed.
    // Plays "Ready" on each whole second and "Go" at zero, and broadcasts the
    // countdown text to every client.
    private bool Countdown()
    {
        GameLogic.countdown -= Time.deltaTime;
        if (GameLogic.countdown < 0.0f)
        {
            if (this.soundManager != null)
                this.soundManager.PlayOnce("Go");
            if (this.countdownText != null)
                ScenePhotonView.RPC("SetCountdown", PhotonTargets.All, "GO !", 3.0f);
            return true;
        }
        // Detect the frame in which the countdown crossed a whole second.
        if ((GameLogic.countdown + Time.deltaTime) - Mathf.FloorToInt(GameLogic.countdown + Time.deltaTime) < Time.deltaTime)
        {
            if (this.soundManager != null)
                this.soundManager.PlayOnce("Ready");
            if (this.countdownText != null)
                ScenePhotonView.RPC("SetCountdown", PhotonTargets.All, "" + Mathf.FloorToInt(GameLogic.countdown + Time.deltaTime), 0.75f);
        }
        return false;
    }

    // Looks up a player record by Photon network ID; null when unknown.
    public static Player GetPlayerByID(int PhotonID)
    {
        for (int i = 0; i < RandomMatchmaker.playersList.Count; ++i)
        {
            if (RandomMatchmaker.playersList[i].PhotonID == PhotonID)
            {
                return RandomMatchmaker.playersList[i];
            }
        }
        return null;
    }

    // Rebuilds a (remote or local) avatar for slot `ID`: swaps in the selected
    // character's Hoverboard prefab and rewires every component that
    // references it.
    public static void InitNetworkPlayer(GameObject player, int ID)
    {
        string str = "PointSpawn" + ID;
        GameObject spawn = GameObject.Find(str);
        Transform tmp = player.transform.FindChild("Hoverboard").parent;
        Destroy(player.transform.FindChild("Hoverboard").gameObject);
        GameObject obj = GameObject.Instantiate(Resources.Load(RandomMatchmaker.playersList[ID - 1].characModel + "/Hoverboard"), tmp.position, spawn.transform.rotation) as GameObject;
        obj.transform.parent = tmp;
        Vector3 vec = new Vector3(0.0f, spawn.transform.eulerAngles.y, 0.0f);
        obj.transform.localEulerAngles = vec;
        player.GetComponent<Controlleur>().body = obj;
        player.GetComponent<Controlleur>().noNetwork = false;
        player.GetComponent<Controlleur>().rail = null;
        player.GetComponent<Controlleur>().Init();
        player.GetComponent<Filin>().m_target = GameObject.Find("plateforme");
        player.GetComponent<Filin>().Init();
        player.GetComponent<AnimationController>().platform = GameObject.Find("plateforme").transform;
        player.GetComponent<AnimationController>().anim = obj.transform.FindChild(RandomMatchmaker.playersList[ID - 1].characModel).gameObject.GetComponent<Animation>();
    }

    // Assigns the slot ID to a spawned avatar and, for remote avatars,
    // finishes their local setup.
    public static void SetPlayer(GameObject player, int index)
    {
        player.GetComponent<NetworkCharacter>().ID = RandomMatchmaker.playersList[index].ID;
        if (!player.GetPhotonView().isMine)
        {
            GameLogic.InitNetworkPlayer(player, player.GetComponent<NetworkCharacter>().ID);
        }
    }

    // Arms the countdown; only meaningful on the master client, whose Update
    // drives it.
    public static void StartCountdown()
    {
        if (PhotonNetwork.isMasterClient)
        {
            isInPreRace = true;
        }
    }

    public static void SendChangeState(int index, bool state)
    {
        ScenePhotonView.RPC("ChangeStatePlayer", PhotonTargets.All, index, state);
    }

    public static void SendChangePerso(int index, string perso)
    {
        ScenePhotonView.RPC("ChangePerso", PhotonTargets.All, index, perso);
    }

    // Tells each connected client to spawn its avatar in slot i + 1.
    public static void SendLaunchGame(List<Player> playerList)
    {
        for (int i = 0; i < playerList.Count; ++i)
        {
            ScenePhotonView.RPC("LaunchGame", PhotonNetwork.playerList[i], i + 1);
        }
    }

    public static void TagPlayer(int playerID)
    {
        ScenePhotonView.RPC("TaggedPlayer", PhotonTargets.All, playerID);
    }

    public static void SendStart()
    {
        ScenePhotonView.RPC("StartRace", PhotonTargets.All);
    }

    // cheat: broadcast a race restart (bound to the R key elsewhere)
    public static void SendRestart()
    {
        ScenePhotonView.RPC("Restart", PhotonTargets.All);
    }

    public static void SendEndRace(List<Player> playerList, Vector3 pos, Quaternion rot)
    {
        for (int i = 0; i < playerList.Count; ++i)
        {
            ScenePhotonView.RPC("EndRace", PhotonTargets.All, i, pos, rot);
        }
    }

    public static void SendScore(int score, int ID)
    {
        ScenePhotonView.RPC("ChangeScore", PhotonTargets.All, score, ID);
    }

    public static void SendLoadingScreen()
    {
        ScenePhotonView.RPC("LoadScreen", PhotonTargets.All);
    }

    [RPC]
    void SetCountdown(string text, float duration)
    {
        this.countdownText.Set(text, duration);
    }

    [RPC]
    void TaggedPlayer(int playerID)
    {
        playerWhoIsIt = playerID;
    }

    // Received by a late joiner when the race is already running: leave the
    // room and go back to the lobby.
    [RPC]
    void GameAlreadyLaunch()
    {
        RandomMatchmaker.playersList.Clear();
        PhotonNetwork.Disconnect();
        RandomMatchmaker.isInLobby = true;
        Debug.Log("Game already launch");
    }

    // Replaces the whole local player list with the master's flattened copy.
    [RPC]
    void SetList(int nbPlayerConnected, bool[] states, int[] PhotonID, int[] ID, string[] models)
    {
        nbPlayers = nbPlayerConnected;
        this.playerList.Clear();
        for (int i = 0; i < nbPlayers; ++i)
        {
            Player newPlayer = new Player(PhotonID[i], ID[i]);
            this.playerList.Add(newPlayer);
            this.playerList[i].isReady = states[i];
            this.playerList[i].characModel = models[i];
        }
        RandomMatchmaker.playersList = this.playerList;
    }

    [RPC]
    void ChangeStatePlayer(int index, bool state)
    {
        if (this.playerList.Count > 0)
        {
            this.playerList[index].isReady = state;
            RandomMatchmaker.playersList[index].isReady = state;
        }
    }

    [RPC]
    void ChangePerso(int index, string perso)
    {
        if (this.playerList.Count > 0)
        {
            this.playerList[index].characModel = perso;
            RandomMatchmaker.playersList[index].characModel = perso;
        }
    }

    // Countdown finished on the master: launch the platform on every client.
    [RPC]
    void StartRace()
    {
        isRaceStart = true;
        GameObject.Find("plateforme").GetComponent<movePlateform>().isLaunch = true;
        GameObject.Find("plateforme").GetComponent<ElectricDischarge>().Init();
    }

    [RPC]
    void LoadScreen()
    {
        RandomMatchmaker.loadingMulti.gameObject.SetActive(true);
    }

    // Spawns the local avatar in the given slot, hides all menu UI, wires up
    // the in-game HUD and camera, and marks the race scene as launched.
    [RPC]
    void LaunchGame(int ID)
    {
        if (isLaunch == false)
        {
            RandomMatchmaker.loadingMulti.ResetTimer();
            Camera.main.light.enabled = false;
            RandomMatchmaker.loadingMulti.gameObject.SetActive(false);
            RandomMatchmaker.accueil.SetActive(false);
            RandomMatchmaker.room.SetActive(false);
            RandomMatchmaker.lobby.SetActive(false);
            RandomMatchmaker.changeCharacterSolo.SetActive(false);
            string str = "PointSpawn" + ID;
            GameObject spawn = GameObject.Find(str);
            this.player = PhotonNetwork.Instantiate("Player", spawn.transform.position, Quaternion.identity, 0);
            GameLogic.InitNetworkPlayer(this.player, ID);
            GameObject.Find("UI Root - Ingame").transform.GetChild(0).gameObject.SetActive(true);
            this.countdownText = GameObject.Find("InfoLabel").GetComponent<FadingText>();
            this.player.GetComponent<AnimationController>().platformInfos = GameObject.Find("Panel - Info").GetComponent<PlateformeInfos>();
            GameObject.Find("PlatformSlider").GetComponent<SliderPositionFromPlatform>().Init();
            GameObject.Find("BoostDisplay").GetComponent<BoostDisplay>().Init();
            GameObject.Find("ScoreDisplay").GetComponent<ScoreDisplay>().Init();
            GameObject.Find("ScoreRanking").GetComponent<ScoreRanking>().Reset();
            GameObject.Find("WrongWayLabel").GetComponent<WrongWayDisplay>().Init();
            this.collectibles = GameObject.Instantiate(Resources.Load("Collectibles")) as GameObject;
            Camera.main.GetComponent<CameraController>().mCar = this.player;
            Camera.main.GetComponent<CameraController>().mCamTarget = this.player.transform.FindChild("CamTarget");
            Camera.main.GetComponent<CameraController>().mLookTarget = this.player.transform.FindChild("LookTarget");
            Camera.main.GetComponent<CameraController>().enabled = true;
            this.background.SetActive(false);
            this.loading.Init();
            isLaunch = true;
        }
    }

    // cheat: respawn the local avatar at slot 1 and restart the countdown
    [RPC]
    void Restart()
    {
        GameLogic.countdown = 3.0f;
        PhotonNetwork.Destroy(this.player);
        this.player = PhotonNetwork.Instantiate("Player", GameObject.Find("PointSpawn1").transform.position, Quaternion.identity, 0);
        GameLogic.InitNetworkPlayer(this.player, 1);
        GameObject.Find("PlatformSlider").GetComponent<SliderPositionFromPlatform>().Init();
        GameObject.Find("BoostDisplay").GetComponent<BoostDisplay>().Init();
        GameObject.Find("ScoreDisplay").GetComponent<ScoreDisplay>().Init();
        GameObject.Find("ScoreRanking").GetComponent<ScoreRanking>().Reset();
        GameObject.Find("WrongWayLabel").GetComponent<WrongWayDisplay>().Init();
        Camera.main.GetComponent<CameraController>().mCar = this.player;
        StartCountdown();
    }

    // Tears the race down on every client: destroys avatars and collectibles,
    // resets the platform and camera, and returns to the menu UI.
    [RPC]
    void EndRace(int ID, Vector3 pos, Quaternion rot)
    {
        if (this.player)
        {
            PhotonNetwork.Destroy(this.player);
        }
        this.player = null;
        GameObject.Find("EndRace").GetComponent<EndRace>().panelResult.SetActive(false);
        GameObject.Find("Network").GetComponent<RandomMatchmaker>().ClickOnBackResults();
        GameObject.Destroy(this.collectibles);
        GameObject.Find("plateforme").GetComponent<movePlateform>().isLaunch = false;
        GameLogic.countdown = 3.0f;
        GameObject.Find("plateforme").GetComponent<movePlateform>().distanceParcouru = 0.0f;
        Camera.main.transform.position = pos;
        Camera.main.transform.rotation = rot;
        Camera.main.GetComponent<Camera>().fieldOfView = 60f;
        isLaunch = false;
        isRaceStart = false;
        RandomMatchmaker.isNeedSetOtherPlayers = true;
        RandomMatchmaker.playersList[ID].isReady = false;
        Camera.main.GetComponent<CameraController>().enabled = false;
        GameObject.Find("UI Root - Ingame").transform.GetChild(0).gameObject.SetActive(false);
        this.background.SetActive(true);
        Camera.main.light.enabled = true;
    }

    [RPC]
    void ChangeScore(int score, int ID)
    {
        ScoreRanking.ChangeScore(score, ID);
    }
}
using UnityEngine;
using System.Collections;
// Photon state sync for one avatar: the owner serializes heading/boost/input
// state; every other client replays it on its local copy in Update.
public class NetworkCharacter : Photon.MonoBehaviour
{
    public int ID;   // slot index assigned by GameLogic.SetPlayer

    // Last values written to / read from the network stream.
    private Vector3 direction;
    private float speed;
    private float boostSpeed;
    private float minSpeed;
    private float fwdForce;
    private Controlleur controlleur;
    private AnimationController animationControlleur;

    void Start()
    {
        this.controlleur = this.gameObject.GetComponent<Controlleur>();
        this.animationControlleur = this.gameObject.GetComponent<AnimationController>();
        this.minSpeed = this.controlleur.minSpeed;
    }

    // Drives REMOTE avatars from the last received state; the local player is
    // moved by its own Controlleur instead.
    void Update()
    {
        if (!photonView.isMine && this.controlleur.noNetwork == false)
        {
            this.controlleur.curAngles.y = this.direction.y;
            // Reconstruct forward speed: base + throttle, floored at base,
            // plus any boost.
            this.speed = this.minSpeed + this.fwdForce;
            if (this.speed < this.minSpeed)
                this.speed = this.minSpeed;
            this.speed += this.boostSpeed;
            this.rigidbody.AddForce(this.controlleur.body.transform.forward * this.speed * 10, ForceMode.Force);
            this.animationControlleur.PlayAnim();
        }
    }

    // Photon serialization callback. The SendNext/ReceiveNext call order and
    // count MUST match exactly on both sides.
    public void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info)
    {
        if (stream.isWriting)
        {
            if (this.controlleur)
            {
                // We own this player: send the others our data.
                this.direction = new Vector3(0, this.controlleur.curAngles.y, 0);
                this.boostSpeed = this.controlleur.boostSpeed;
                this.fwdForce = Input.GetAxis("Forward") * this.controlleur.acceleration;
                // (Removed a no-op self-assignment of controlleur.state here.)
                stream.SendNext(this.direction);
                stream.SendNext(this.boostSpeed);
                stream.SendNext(this.fwdForce);
                stream.SendNext(this.controlleur.state);
            }
        }
        else
        {
            // Network player: receive data.
            this.direction = (Vector3)stream.ReceiveNext();
            this.boostSpeed = (float)stream.ReceiveNext();
            this.fwdForce = (float)stream.ReceiveNext();
            // Fix: always consume the 4th item — the original skipped this
            // ReceiveNext when controlleur was null, desynchronizing the stream.
            EState remoteState = (EState)stream.ReceiveNext();
            if (this.controlleur)
                this.controlleur.state = remoteState;
        }
    }

    // Empty receiver kept so broadcast "TaggedPlayer" RPCs targeting every
    // PhotonView resolve without error.
    [RPC]
    void TaggedPlayer(int idPlayer)
    { }
}
<file_sep>/src/GUI/PlateformeInfos.cs
using UnityEngine;
using System.Collections;
// Shows a warning icon when the moving platform (drone) is ahead of the
// player along the "plateformRail" iTween path.
public class PlateformeInfos : MonoBehaviour
{
    public GameObject downloadOff;
    public GameObject downloadOn;
    public GameObject droneBehind;    // icon displayed while the drone is ahead
    private float distanceParcourue;  // player's progress along the rail
    private movePlateform platform;
    private Controlleur player;       // injected via InitPlayer()

    void Start()
    {
        // Guard: the scene may not contain the platform yet.
        GameObject plateforme = GameObject.Find("plateforme");
        if (plateforme != null)
            this.platform = plateforme.GetComponent<movePlateform>();
    }

    // Must be called once the local player has been spawned.
    public void InitPlayer(Controlleur player)
    {
        this.player = player;
    }

    void Update()
    {
        // Guard: the original dereferenced player/platform unconditionally and
        // threw NullReferenceException before InitPlayer() was called.
        if (this.player == null || this.platform == null)
            return;
        Vector3 closestPoint = Vector3.zero;
        iTween.ClosestPointOnPath(iTweenPath.GetPath("plateformRail"), this.player.transform.position, 0.01f, out closestPoint, out this.distanceParcourue);
        // Negative -> the platform has travelled less than the player: drone is behind us? No —
        // a negative difference means the player is ahead of the platform, so warn.
        float dist = this.platform.distanceParcouru - this.distanceParcourue;
        this.droneBehind.SetActive(dist < 0.0f);
    }
}
<file_sep>/src/Gameplay/ScoreMultiplicateur.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
public class ScoreMultiplicateur : MonoBehaviour {
#region Public Members
public List<Vector2> m_bonus = new List<Vector2>(); // Score multiplicateurs [X: temps en secondes; Y: coeff]
#endregion
public float getProgress(float pTime)
{
float value = 0f;
for (int lIndex = 1; lIndex < m_bonus.Count; lIndex++)
{
if (pTime > m_bonus[lIndex].x)
value++;
else
{
float percent = ((pTime - m_bonus[lIndex - 1].x) / (m_bonus[lIndex].x - m_bonus[lIndex - 1].x));
if (percent <= 0)
break;
value += percent;
}
}
return value;
}
public float getCoeff(float pTime)
{
int lIndex = 0;
for ( ; lIndex < m_bonus.Count; lIndex++)
{
if (m_bonus[lIndex].x > pTime)
{
if (lIndex - 1 < 0)
return 0;
else
return m_bonus[lIndex - 1].y;
}
}
return m_bonus[lIndex - 1].y;
}
public float getLastPalier(float pTime)
{
int lIndex = 0;
for ( ; lIndex < m_bonus.Count; lIndex++)
{
if (m_bonus[lIndex].x > pTime)
{
if (lIndex - 1 < 0)
return 0;
else
return m_bonus[lIndex - 1].x;
}
}
return m_bonus[lIndex - 1].x;
}
}
<file_sep>/src/GUI/SelectItem.cs
using UnityEngine;
using System.Collections;
// Gives NGUI keyboard/controller focus to a configured widget.
public class SelectItem : MonoBehaviour
{
    public GameObject item;  // widget to receive the selection

    // Makes the configured widget the currently selected UI object.
    public void Select()
    {
        if (this.item == null)
            return;
        UICamera.selectedObject = this.item;
    }
}
<file_sep>/src/GUI/ChangeCharacter.cs
using UnityEngine;
using System.Collections;
// Character-select screen: lays the portraits out and moves the "locked"
// overlay (plus the padlock animation) onto the highlighted character.
public class ChangeCharacter : MonoBehaviour
{
    public UISprite[] lockedImages;  // "locked" overlays, one per character
    public UISprite[] parentImages;  // character portrait sprites
    public MoveCadenas cadenas;      // padlock animation controller
    private int current = -1;        // index currently highlighted (-1 = none)

    // Places the portraits side by side, centred on the screen.
    void Start()
    {
        int half = Screen.width >> 1;
        for (int idx = 0; idx < this.lockedImages.Length; idx++)
        {
            UISprite portrait = this.parentImages[idx];
            portrait.height = Mathf.FloorToInt(Screen.height / 1.4f);
            portrait.leftAnchor.absolute = half + portrait.width * (idx - 1);
            portrait.rightAnchor.absolute = portrait.leftAnchor.absolute + portrait.width;
        }
    }

    // Moves the locked overlay/padlock to the character at |index|;
    // -1 clears the highlight. Re-selecting the same index is a no-op
    // apart from refreshing `current`.
    public void UpdateCharacterSelection(int index)
    {
        if (index == -1)
        {
            this.cadenas.UpdateCharacterLocked(null);
            if (this.current != -1)
                this.lockedImages[this.current].gameObject.SetActive(false);
        }
        else if (this.current != index)
        {
            if (this.current != -1)
                this.lockedImages[this.current].gameObject.SetActive(false);
            this.lockedImages[index].gameObject.SetActive(true);
            this.cadenas.UpdateCharacterLocked(this.parentImages[index]);
        }
        this.current = index;
    }
}
<file_sep>/README.md
![TracksBorder][]
![TracksBanner][]
![TracksBorder][]
[TracksBorder]: https://raw.githubusercontent.com/Blackhart/unity3D-csharp-racingGame-Tracks/master/wiki/border.png
[TracksBanner]: https://raw.githubusercontent.com/Blackhart/unity3D-csharp-racingGame-Tracks/master/wiki/banner.bmp
See [wiki][] for more details.
[wiki]: https://github.com/Blackhart/unity3D-csharp-racingGame-Tracks/wiki
<file_sep>/src/GUI/PulsatingItem.cs
using UnityEngine;
using System.Collections;
// Periodically flashes an NGUI widget: snaps it to full alpha, then tweens
// it back to transparent every `fadingTimer` seconds.
public class PulsatingItem : MonoBehaviour
{
    public UIWidget item;          // widget whose alpha pulses
    public float fadingTime;       // duration of one fade-out tween
    public float fadingTimer;      // delay between two pulses
    private float lastFade = 0;    // level-load time of the last pulse
    private bool started = false;  // set once Start() has initialised the widget

    private void Start()
    {
        // Begin hidden and with the component disabled; Update only runs
        // once something re-enables this component.
        this.item.alpha = 0f;
        this.enabled = false;
        this.started = true;
    }

    // One pulse: fully visible, then tween back to transparent.
    private void Pulse()
    {
        if (!this.started)
            return;
        this.item.alpha = 1f;
        TweenAlpha.Begin(this.item.gameObject, this.fadingTime, 0f);
        this.lastFade = Time.timeSinceLevelLoad;
    }

    private void Update()
    {
        bool pulseDue = Time.timeSinceLevelLoad >= this.lastFade + this.fadingTimer;
        if (pulseDue)
            this.Pulse();
    }
}
<file_sep>/src/Controlleur/Turbo.cs
using UnityEngine;
using System.Collections;
// Turbo boost: reads the "Turbo" button, drains the gauge, drives FX/sound
// and feeds the resulting boost speed into the Controlleur.
public class Turbo : MonoBehaviour
{
    #region Private members
    private float _turboSpeed;
    // Current turbo speed; writes are clamped to [0, m_turboSpeedMax].
    public float m_turboSpeed
    {
        get
        {
            return _turboSpeed;
        }
        set
        {
            _turboSpeed = Mathf.Clamp(value, 0.0f, m_turboSpeedMax);
        }
    }
    // True while the turbo is engaged (button held and gauge not empty).
    // (Removed an unused private field `_turbo` that shadowed this
    // auto-property's backing field and was never read or written.)
    public bool m_turbo
    {
        get;
        private set;
    }
    private Jauge _jauge;         // fuel gauge drained while the turbo runs
    private FXManager fxManager;  // particle FX toggled with the turbo
    #endregion

    #region Public members
    public float m_turboSpeedMax; // Max speed turbo
    public float m_turboAcc;      // Turbo speed acceleration/s
    public float m_turboDec;      // Turbo speed deceleration/s
    public float m_turboConso;    // Consumption of gauge per second of turbo
    public FmodEventAudioSource source;
    public string eventName;
    public FmodEventAsset asset;
    public Controlleur controlleur;
    #endregion

    void Awake()
    {
        if (Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source) == false)
            Debug.LogError("Can't load event: " + this.eventName);
        else
        {
            this.source.playOnAwake = false;
        }
    }

    void Start()
    {
        _jauge = GetComponent<Jauge>();
        m_turboSpeed = 0.0f;
        m_turbo = false;
        this.controlleur = this.GetComponent<Controlleur>();
        this.fxManager = this.GetComponent<FXManager>();
    }

    void Update()
    {
        // Only the locally-owned player during a race (or any player in
        // no-network mode) reads turbo input.
        if (this.controlleur.photonView.isMine && GameLogic.isRaceStart || this.controlleur.noNetwork)
        {
            turbo();
        }
    }

    // Reads the turbo button, toggles FX/sound and integrates turbo speed.
    private void turbo()
    {
        // Get turbo button state (edge-triggered).
        if (Input.GetButtonDown("Turbo") == true)
        {
            m_turbo = true;
        }
        else if (Input.GetButtonUp("Turbo") == true)
        {
            m_turbo = false;
        }
        if (m_turbo == true)
        {
            this.fxManager.turbo.SetActive(true);
            this.source.Play();
        }
        else
        {
            this.fxManager.turbo.SetActive(false);
            if (this.source.getStatus() != "Stopped")
            {
                this.source.Stop();
            }
        }
        float lTime = Time.deltaTime;
        // Integrate acceleration or deceleration for this frame.
        if (m_turbo)
            turboAcceleration(lTime);
        if (!m_turbo)
            turboDeceleration(lTime);
        // Push the resulting boost speed into the movement controller.
        this.gameObject.GetComponent<Controlleur>().setBoostSpeed(m_turboSpeed);
    }

    // Accelerates the turbo while there is enough fuel; otherwise cuts it.
    private void turboAcceleration(float pTime)
    {
        float lConso = m_turboConso * pTime;
        if (_jauge.m_jauge - lConso < 0.0f)
        {
            m_turbo = false;
            return;
        }
        m_turboSpeed += m_turboAcc * pTime;
        _jauge.m_jauge -= lConso;
    }

    // Bleeds off turbo speed when the turbo is released (clamped at 0 by
    // the m_turboSpeed setter).
    private void turboDeceleration(float pTime)
    {
        m_turboSpeed -= m_turboDec * pTime;
    }
}
<file_sep>/src/Camera/CameraController.cs
using UnityEngine;
using System.Collections;
// Chase camera: follows the car through two helper transforms (mCamTarget
// behind/above, mLookTarget ahead), with speed-dependent FOV and distance.
public class CameraController : MonoBehaviour
{
    #region Public Members
    public bool inLoop = false;   // toggled by CameraTriggerLoop; applies extra pitch while in a loop
    public float loopAngle = 20f; // extra downward pitch (deg) applied while inLoop
    public GameObject mCar; // Car game object
    public Transform mLookTarget;
    public Transform mCamTarget;
    public float mDistance = 6.4f; // Distance between camera and player (X axis)
    public float mHeight = 10.0f; // Height between terrain and camera (Y axis)
    public float mLookAtDist = 0.0f; // Additional dist between lookat point and player
    public float mRotationDamping = 3.0f; // Step rotation
    public float mFOVDamping = 2.0f; // Step FOV
    public float mDistDamping = 2.0f; // Step dist
    public float mDefaultFOV = 60.0f; // Default FOV 60.0f/70.0f
    public float mRatioZoom = 0.5f; // Multiplier Ratio fov/unit speed
    public float mRatioDist = 0.5f; // Multiplier Ratio dist / unit speed
    #endregion
    #region Private Members
    private Controlleur mControlleur;  // movement controller of the followed car
    private Vector3 mWantedPos;        // target camera position for this frame
    private Quaternion mWantedRot;     // target camera rotation for this frame
    private Vector3 mWantedCarPos;     // car position snapshot (written, not read back here)
    private float mRealDist;           // current smoothed camera-to-car distance
    #endregion

    // Snaps the camera onto its target transform at startup.
    void Start()
    {
        mControlleur = mCar.GetComponent<Controlleur>();
        mCamTarget.localPosition = new Vector3(0.0f, mHeight, -mDistance);
        mLookTarget.localPosition = new Vector3(0.0f, 0.0f, mLookAtDist);
        transform.position = mCamTarget.position;
        transform.rotation = mCamTarget.rotation;
        mRealDist = mDistance;
    }

    // LateUpdate so the car has already moved this frame before the camera follows.
    void LateUpdate()
    {
        UpdateCamTarget();
        mWantedPos = mCamTarget.position;
        if (inLoop)
            Loop(mCamTarget);
        else
            mWantedRot = mCamTarget.rotation;
        mWantedCarPos = mCar.transform.position;
        Quaternion lRot = transform.rotation;
        PosUpdate();
        RotUpdate(lRot);
    }

    // Computes the wanted rotation with extra pitch for loops, without
    // permanently modifying the target transform.
    void Loop(Transform trans)
    {
        Quaternion tmp = trans.rotation;
        trans.Rotate(Vector3.right * -loopAngle);
        mWantedRot = trans.rotation;
        trans.rotation = tmp;
    }

    // Slerps the camera towards the wanted rotation.
    void RotUpdate(Quaternion pRot)
    {
        transform.rotation = Quaternion.Slerp(pRot, mWantedRot, mRotationDamping * Time.deltaTime);
    }

    // Moves the camera towards the target position, working in the car's
    // local space, then re-projects at the smoothed distance mRealDist.
    void PosUpdate()
    {
        // Local cam pos
        Vector3 lCamPosTmp = mCar.transform.InverseTransformPoint(transform.position);
        // Local target pos
        Vector3 lTargetPosTmp = mCar.transform.InverseTransformPoint(mCamTarget.position);
        // Move cam pos to target pos
        Vector3 lPos = Vector3.Slerp(lCamPosTmp, lTargetPosTmp, mRotationDamping * Time.deltaTime);
        lPos.y = Mathf.Lerp(lCamPosTmp.y, lTargetPosTmp.y, mRotationDamping * Time.deltaTime);
        // Local car pos
        Vector3 lCarPosTmp = new Vector3(0.0f, mHeight, 0.0f);
        // Dir car pos to cam pos
        Vector3 lDirNorm = (lPos - lCarPosTmp).normalized;
        // Set new pos
        transform.position = mCar.transform.TransformPoint(lCarPosTmp + lDirNorm * mRealDist);
    }

    // Speed-dependent camera pull-back and FOV widening.
    void FixedUpdate()
    {
        // Get acceleration
        float acc = mControlleur.currentSpeed;
        // Update new magnitude from cam pos to car pos
        mRealDist = Mathf.Lerp(-(mCamTarget.localPosition.z), mDistance + acc * mRatioDist, mDistDamping * Time.deltaTime);
        mCamTarget.localPosition = new Vector3(mCamTarget.localPosition.x, mCamTarget.localPosition.y, -mRealDist);
        // Update field of view according to speed
        camera.fieldOfView = Mathf.Lerp(camera.fieldOfView, mDefaultFOV + acc * mRatioZoom, mFOVDamping * Time.deltaTime);
    }

    // Re-aims the camera target so it looks down at the look-at point.
    void UpdateCamTarget()
    {
        float lAngle = 90.0f - (Mathf.Atan(((-mCamTarget.localPosition.z + mLookAtDist) / mHeight)) * Mathf.Rad2Deg);
        mCamTarget.localRotation = Quaternion.Euler(lAngle, 0.0f, 0.0f);
        mCamTarget.localPosition = new Vector3(0.0f, mHeight, -mDistance);
        mLookTarget.localPosition = new Vector3(0.0f, 0.0f, mLookAtDist);
    }
}
<file_sep>/src/GUI/LoadingScreen.cs
using UnityEngine;
using System.Collections;
// Minimum-duration loading screen: reports "loaded" only after the timer
// has accumulated `timeNeeded` seconds of calls.
public class LoadingScreen : MonoBehaviour
{
    public UISprite background;  // full-screen loading sprite
    public float timeNeeded;     // minimum display time, in seconds
    private float timer;         // time accumulated so far

    void Start()
    {
        this.timer = 0.0f;
    }

    // Advances the internal timer by one frame's delta and returns whether
    // the minimum display time has elapsed. Call once per frame.
    public bool IsLoaded()
    {
        this.timer += Time.deltaTime;
        return this.timer >= this.timeNeeded;
    }

    // Restarts the minimum-duration countdown.
    public void ResetTimer()
    {
        this.timer = 0.0f;
    }
}
<file_sep>/src/Gameplay/EndRace.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
// End-of-race handling: shows the result panel, restarts the race (network
// and no-network variants), and hosts the pause menu in solo mode.
public class EndRace : MonoBehaviour
{
    private Vector3 PositionStart;        // player's spawn position, captured in Start()
    private Quaternion RotationStart;     // player's spawn rotation, captured in Start()
    private RandomMatchmaker matchMaker;  // network matchmaker (network mode only)
    public static bool inResult = false;  // true while the result panel is displayed
    private GameObject collectibles;      // "Collectibles" scene root, re-instantiated on restart
    public GameObject Obj;                // the player object this component manages
    public bool Network;                  // true when running a networked race
    public Chrono chrono;
    public GameObject panelResult;
    public UISprite background;
    public UILabel[] scoresLabel;
    public UILabel[] namesLabel;
    public UILabel chronoLabel;
    public GameObject pausePanel;
    public UISprite pauseBackground;
    private bool _pause = false;

    // Captures the spawn transform and stretches the panel backgrounds to
    // cover the whole screen.
    void Start()
    {
        if (this.pausePanel != null)
        {
            this.pausePanel.SetActive(false);
            this.pauseBackground.width = Screen.width;
            this.pauseBackground.height = Screen.height;
            this.pauseBackground.leftAnchor.absolute = 0;
            this.pauseBackground.rightAnchor.absolute = 0;
            this.pauseBackground.bottomAnchor.absolute = 0;
            this.pauseBackground.topAnchor.absolute = 0;
        }
        this.panelResult.SetActive(false);
        this.background.width = Screen.width;
        this.background.height = Screen.height;
        this.background.leftAnchor.absolute = 0;
        this.background.rightAnchor.absolute = 0;
        this.background.bottomAnchor.absolute = 0;
        this.background.topAnchor.absolute = 0;
        if (Obj == null)
            return;
        PositionStart = Obj.transform.position;
        RotationStart = Obj.transform.rotation;
        if (Network)
            this.matchMaker = GameObject.Find("Network").GetComponent<RandomMatchmaker>();
        this.collectibles = GameObject.Find("Collectibles");
    }

    // Solo: R restarts, Escape opens the pause menu.
    // Network: Escape leaves the race and returns to the menu.
    void Update()
    {
        if (!Network)
        {
            if (Input.GetKeyDown(KeyCode.R))
                this.RestartGameNoNetWork();
            if (Input.GetKey(KeyCode.Escape))
            {
                this.showPauseUI();
            }
        }
        else
        {
            if (Input.GetKey(KeyCode.Escape))
            {
                Camera.main.GetComponent<CameraController>().enabled = false;
                // NOTE(review): incrementing the LAN broadcast port here —
                // presumably to avoid rejoining the same room; confirm.
                GameObject.Find("Network").GetComponent<Lan_broadcast>().__udp_port++;
                this.RestartGameNetWork();
                this.matchMaker.BackToMenu();
            }
        }
    }

    // Fills and shows the result panel (solo: chrono; network: ranking
    // sorted by descending score).
    void DrawResults()
    {
        int i = 0;
        GameObject.Find("UI Root - Ingame").transform.GetChild(0).gameObject.SetActive(false);
        this.panelResult.SetActive(true);
        this.panelResult.GetComponent<SelectItem>().Select();
        if (this.Network == false)
        {
            this.scoresLabel[0].text = "Player 1 " + this.chronoLabel.text;
        }
        else if (ScoreRanking.players != null)
        {
            ScoreRanking.players = ScoreRanking.players.OrderByDescending(kvp => kvp.Value).ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
            foreach (KeyValuePair<int, int> lPair in ScoreRanking.players)
            {
                this.namesLabel[i].text = lPair.Key + "P";
                this.scoresLabel[i].text = lPair.Value.ToString() + " Go";
                i++;
            }
        }
        inResult = true;
    }

    // Finish-line trigger: the platform ends a network race, the player
    // ends a solo race.
    void OnTriggerEnter(Collider col)
    {
        if (col.gameObject.CompareTag("Plateforme") == true && Network == true)
        {
            this.DrawResults();
            GameLogic.isRaceStart = false;
            GameLogic.isInPreRace = false;
            GameLogic.countdown = 3.0f;
            Camera.main.GetComponent<CameraController>().enabled = false;
        }
        else if (col.gameObject.CompareTag("Player") == true && Network == false)
        {
            if (this.Obj == null)
                return;
            this.DrawResults();
        }
    }

    // Hides the result panel and broadcasts the end-of-race reset.
    public void CallEndRace()
    {
        this.panelResult.SetActive(false);
        inResult = false;
        GameLogic.SendEndRace(RandomMatchmaker.playersList, PositionStart, RotationStart);
    }

    // Solo restart: teleports the player back to spawn, resets speed, gauge,
    // collectibles, platform progress and chrono.
    public void RestartGameNoNetWork()
    {
        inResult = false;
        this.panelResult.SetActive(false);
        Obj.transform.position = PositionStart;
        Obj.transform.rotation = RotationStart;
        Obj.GetComponent<Controlleur>().currentSpeed = 0;
        Obj.GetComponent<Controlleur>().boostSpeed = 0;
        Obj.GetComponent<Moteur>().source.Play();
        Obj.GetComponent<Jauge>().m_jauge = Obj.GetComponent<Jauge>().m_maxJaugeValue;
        GameObject.Destroy(this.collectibles);
        GameObject.Find("UI Root - Ingame").transform.GetChild(0).gameObject.SetActive(true);
        this.collectibles = GameObject.Instantiate(Resources.Load("Collectibles")) as GameObject;
        GameObject.Find("plateforme").GetComponent<movePlateform>().distanceParcouru = 0.0f;
        this.chrono.ResetTimer();
    }

    // Network restart: reset transform/FOV locally, then notify all players.
    public void RestartGameNetWork()
    {
        Obj.transform.position = PositionStart;
        Obj.transform.rotation = RotationStart;
        Camera.main.fieldOfView = Obj.GetComponent<CameraController>().mDefaultFOV;
        GameLogic.SendEndRace(RandomMatchmaker.playersList, PositionStart, RotationStart);
    }

    // Freezes/unfreezes gameplay components and physics for the pause menu.
    private void pause(bool value = true)
    {
        this._pause = value;
        Obj.GetComponent<Controlleur>().enabled = !this._pause;
        Obj.GetComponent<Turbo>().enabled = !this._pause;
        Obj.GetComponent<stabilityBoard>().enabled = !this._pause;
        Obj.GetComponent<Filin>().enabled = !this._pause;
        this.chrono.started = !value;
        if (this._pause)
            Obj.rigidbody.Sleep();
        else
            Obj.rigidbody.WakeUp();
    }

    private void returnMenu()
    {
        Application.LoadLevel(0);
    }

    private void quitGame()
    {
#if UNITY_EDITOR
        UnityEditor.EditorApplication.isPlaying = false;
#else
        Application.Quit();
#endif
    }

    // UI callback: close the pause menu and resume gameplay.
    public void resumeGame()
    {
        this.pausePanel.SetActive(false);
        this.pause(false);
    }

    // UI callback: leave the race for the main menu.
    public void quitRace()
    {
        returnMenu();
    }

    // Opens the pause menu and freezes gameplay.
    private void showPauseUI()
    {
        this.pausePanel.SetActive(true);
        this.pause();
        this.pausePanel.GetComponent<SelectItem>().Select();
    }
}
<file_sep>/src/Gameplay/Jauge.cs
using UnityEngine;
using System.Collections;
// Fuel gauge: drained by Turbo/dash, regenerates up to m_maxRegenValue and
// mirrors its value into an on-screen GUIText.
public class Jauge : MonoBehaviour
{
    #region Private members
    public float _jauge;  // raw value; also read directly by Controlleur.MakeDash
    // Gauge value; writes clamp to [0, m_maxJaugeValue] and refresh the HUD.
    public float m_jauge
    {
        get { return _jauge; }
        set
        {
            _jauge = Mathf.Clamp(value, 0.0f, m_maxJaugeValue);
            if (this.m_indicJauge)
            {
                m_indicJauge.text = "Jauge: " + (int)_jauge;
            }
        }
    }
    private Turbo turbo;  // used to mute the regen sound while boosting
    #endregion

    #region Public members
    public float m_regenJauge; // Regen Jauge/s
    public float m_maxJaugeValue = 70.0f; // Jauge maximum value
    public float m_maxRegenValue = 30.0f; // Jauge regenerates until this value is reached
    public GUIText m_indicJauge; // Text Jauge lvl
    public FmodEventAudioSource source;
    public string eventName;
    public FmodEventAsset asset;
    #endregion

    void Awake()
    {
        bool loaded = Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source);
        if (!loaded)
        {
            Debug.LogError("Can't load event: " + this.eventName);
            return;
        }
        this.source.playOnAwake = false;
    }

    void Start()
    {
        m_jauge = m_maxJaugeValue;
        this.turbo = this.GetComponent<Turbo>();
    }

    void Update()
    {
        RegenJauge();
    }

    // Per-frame regeneration up to m_maxRegenValue; the regen sound plays
    // whenever the turbo is not engaged.
    private void RegenJauge()
    {
        if (m_jauge >= m_maxRegenValue)
            return;
        m_jauge = Mathf.Clamp(m_jauge + m_regenJauge * Time.deltaTime, 0.0f, m_maxRegenValue);
        if (!this.turbo.m_turbo)
        {
            this.source.Play();
        }
    }
}
<file_sep>/src/GUI/GUI_classement.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
// Maintains the live ranking ("Player <id>" -> score), sorted by score
// descending; only the locally-owned player's score is refreshed here.
public class GUI_classement : MonoBehaviour
{
    #region Public Members
    [HideInInspector]
    public static Dictionary<string, int> mClassement;
    [HideInInspector]
    public GameObject player;  // the locally-owned player object
    #endregion

    #region Init
    private void Start()
    {
        mClassement = new Dictionary<string, int>();
    }
    #endregion

    // Registers every spawned player in the ranking and remembers ours.
    public void Init()
    {
        foreach (GameObject lPlayer in GameObject.FindGameObjectsWithTag("Player"))
        {
            string lKey = "Player " + lPlayer.GetComponent<NetworkCharacter>().ID;
            if (!mClassement.ContainsKey(lKey))
            {
                mClassement.Add(lKey, (int)lPlayer.GetComponent<Filin>().m_score);
            }
            if (lPlayer.GetPhotonView().isMine)
            {
                this.player = lPlayer;
            }
        }
    }
    #endregion

    #region Unity Update
    private void Update()
    {
        if (!GameLogic.isRaceStart)
            return;
        string lKey = "Player " + this.player.GetComponent<NetworkCharacter>().ID;
        mClassement[lKey] = (int)this.player.GetComponent<Filin>().m_score;
        // Keep the dictionary ordered by descending score for display.
        mClassement = mClassement.OrderByDescending(kvp => kvp.Value).ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    }
    #endregion
}
<file_sep>/src/Sound/DJLoup.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
// Background-music DJ: switches between the menu theme and the in-game
// theme, both loaded from an FMOD event bank.
public class DJLoup : MonoBehaviour
{
    public FmodEventAsset asset;      // FMOD bank holding the theme events
    public FmodEventAudioSource src;  // source the themes are played on
    public List<string> soundList;    // event names; [0] = game theme, [1] = menu theme

    // Loads the event at |pIndex| into the source and plays it.
    private void launchTheme(int pIndex)
    {
        if (CreateEventInstance(asset.GetEventWithName(soundList[pIndex]), src) == false)
            Debug.LogError("Can't load event: " + soundList[pIndex]);
        else
            src.Play();
    }

    // Stops whatever is playing and starts the menu theme.
    public void launchMenuThemeSong()
    {
        if (src)
            src.Stop();
        launchTheme(1);
    }

    // Stops whatever is playing and starts the in-game theme.
    public void launchGameThemeSong()
    {
        if (src)
            src.Stop();
        launchTheme(0);
    }

    // Kept for existing callers; delegates to Moteur's identical helper so
    // the event-instance logic lives in exactly one place. (The original
    // duplicated the whole body; an empty Start() was also removed.)
    public static bool CreateEventInstance(FmodEvent srcEvent, FmodEventAudioSource source)
    {
        return Moteur.CreateEventInstance(srcEvent, source);
    }
}
<file_sep>/src/Sound/Moteur.cs
using UnityEngine;
using System.Collections;
// Engine sound: maps the player's current speed/boost onto the FMOD "rpm"
// parameter every frame.
public class Moteur : MonoBehaviour
{
    public int rpmWithoutBoost;  // RPM reached at maxSpeed without boost
    public int maxRPM;           // absolute RPM ceiling (boost included)
    public int maxSpeed;         // speed value mapped onto rpmWithoutBoost
    public int maxBoost;         // boost value mapped onto the remaining RPM range
    public string eventName;
    public FmodEventAsset asset;
    public FmodEventAudioSource source;
    private Controlleur controller;

    void Awake()
    {
        this.source = this.GetComponent<FmodEventAudioSource>();
        this.source.playOnAwake = false;
    }

    void Start()
    {
        if (CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source) == false)
            Debug.LogError("Can't load event: " + this.eventName);
        else
        {
            this.source.Play();
        }
        this.controller = this.GetComponent<Controlleur>();
    }

    void Update()
    {
        if (EndRace.inResult == false)
        {
            // Use float division: the original int/int division truncated the
            // RPM-per-unit ratios (e.g. rpmWithoutBoost=50, maxSpeed=100 -> 0,
            // silencing the rpm parameter entirely).
            float value = (float)this.rpmWithoutBoost / this.maxSpeed;
            float boost = (float)(this.maxRPM - this.rpmWithoutBoost) / this.maxBoost;
            this.source.SetParameterValue("rpm", value * Mathf.Clamp(this.controller.currentSpeed, 0, this.maxSpeed) + boost * this.controller.boostSpeed);
        }
        else
        {
            // Race finished: cut the engine sound.
            this.source.Stop();
        }
    }

    // Builds an FmodEventAudioClip from |srcEvent| and assigns it to |source|.
    // Returns false (and logs) when the event is missing.
    public static bool CreateEventInstance(FmodEvent srcEvent, FmodEventAudioSource source)
    {
        if (srcEvent == null)
        {
            Debug.LogError("srcEvent is null");
            return false;
        }
        FmodEventAudioClip clip = ScriptableObject.CreateInstance<FmodEventAudioClip>();
#if UNITY_EDITOR
        // In the editor the event must be cloned before it can be played.
        FmodEvent evt = ScriptableObject.CreateInstance("FmodEvent") as FmodEvent;
        evt.Initialize(srcEvent);
        clip.Initialize(evt);
#else
        clip.Initialize(srcEvent);
#endif
        source.eventClip = clip;
        return true;
    }
}
<file_sep>/src/Camera/CameraTriggerLoop.cs
using UnityEngine;
using System.Collections;
// Toggles the chase camera's "loop" pitch mode when the local player's
// physical collider (not one of its trigger colliders) enters this volume.
public class CameraTriggerLoop : MonoBehaviour
{
    public GameObject cam;  // camera whose loop mode is toggled

    private void OnTriggerEnter(Collider pCol)
    {
        if (pCol.tag != "Player" || pCol.isTrigger)
            return;
        Controlleur lPlayer = pCol.gameObject.GetComponent<Controlleur>();
        if (!lPlayer)
            return;
        if (lPlayer.photonView.isMine || lPlayer.noNetwork)
        {
            CameraController lCamCtrl = cam.GetComponent<CameraController>();
            lCamCtrl.inLoop = !lCamCtrl.inLoop;
        }
    }
}
<file_sep>/src/Controlleur/Controlleur.cs
using UnityEngine;
using System.Collections;
// Hoverboard player controller: forward-speed integration, steering (free or
// clamped around a rail direction), side dash, collisions, FX and sound.
public class Controlleur : Photon.MonoBehaviour
{
    public float minSpeed;          // base forward speed, always applied
    public float maxSpeed;          // forward speed cap (before boost)
    public float acceleration;      // input-scaled forward acceleration
    public float decceleration;     // speed lost per physics step without input
    public float dragForce;         // speed lost per physics step while `collision` is set
    public float currentSpeed;      // current integrated forward speed
    public int rotateSpeedMax;      // max steering rate
    public float angleRotation;     // max deviation (deg) allowed around the rail heading
    public Transform rail;          // rail used to clamp the heading (null = free steering)
    public bool noNetwork = false;  // true in solo / no-network mode
    public float forceDash;         // impulse applied by a side dash
    public float consoDash;         // gauge cost of one dash
    public string eventName;        // FMOD collision-sound event name
    public FmodEventAsset asset;
    public Vector3 curAngles;       // current euler angles; .y is the heading
    public float boostSpeed;        // extra speed injected by Turbo.setBoostSpeed()
    public GameObject body;         // visual hoverboard/character root
    // Animation state, also replicated over the network by NetworkCharacter.
    public EState state
    {
        get { return _state; }
        set { _state = value; }
    }
    private EState _state;
    private float _speedPlateform;  // platform speed matched while hooked by the Filin
    public FmodEventAudioSource source;
    private FXManager fxManager;
    private DJLoup themeSound;
    // NOTE(review): never set to true anywhere in this file — the dragForce
    // branch in CalculateSpeed() appears dead; confirm against other files.
    private bool collision = false;

    void Awake()
    {
        if (Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source) == false)
            Debug.LogError("Can't load event: " + this.eventName);
        else
        {
            this.source.playOnAwake = false;
        }
        // NOTE(review): runs when the app is flagged "network" but no
        // "Network" object exists — presumably the offline path where the
        // placeholder hoverboard is swapped for the model chosen in the
        // menu; confirm against ApplicationHandler.
        if (GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().network == true && GameObject.Find("Network") == null)
        {
            Transform tmp = this.transform.FindChild("Hoverboard").parent;
            Destroy(this.transform.FindChild("Hoverboard").gameObject);
            GameObject obj = GameObject.Instantiate(Resources.Load(GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model + "/Hoverboard"), this.transform.position, this.transform.rotation) as GameObject;
            obj.transform.parent = tmp;
            Vector3 vec = new Vector3(0.0f, this.transform.eulerAngles.y, 0.0f);
            obj.transform.localEulerAngles = vec;
            this.body = obj;
            this.GetComponent<AnimationController>().anim = obj.transform.FindChild(GameObject.Find("ApplicationHandler").GetComponent<ApplicationHandler>().model).gameObject.GetComponent<Animation>();
            Camera.main.GetComponent<CameraController>().mCar = this.gameObject;
        }
    }

    // Aligns the player with the rail direction and starts the game theme.
    void Start ()
    {
        this.fxManager = this.GetComponent<FXManager>();
        this.rigidbody.freezeRotation = true;
        curAngles = Vector3.zero;
        curAngles.y = this.calculeAngleWorld();
        transform.eulerAngles = this.curAngles;
        themeSound = GameObject.Find("DJLoup").GetComponent<DJLoup>();
        themeSound.launchGameThemeSong();
    }

    // Re-aligns the player with the rail (used on race restart).
    public void Init()
    {
        curAngles = Vector3.zero;
        curAngles.y = this.calculeAngleWorld();
        transform.eulerAngles = this.curAngles;
        if (noNetwork)
        {
            this.GetComponent<Filin>().Init();
        }
    }

    // Per-frame steering, only for the locally-owned player during a race
    // (or any player in no-network mode).
    void Update ()
    {
        if (photonView.isMine && GameLogic.isRaceStart || noNetwork)
        {
            this.DirectionUpdate();
        }
    }

    // Physics step: speed integration, dash input, and free-steering mode
    // (when no rail constrains the heading). Also derives the animation state.
    void FixedUpdate()
    {
        if (photonView.isMine && GameLogic.isRaceStart || noNetwork)
        {
            this.CalculateSpeed();
            this.Dash();
            if (this.rail == null)
            {
                float steerForce = Input.GetAxis("Horizontal") * rotateSpeedMax;
                if (steerForce > 0.5)
                    this.state = EState.e_RTurn;
                else if (steerForce < -0.5f)
                    this.state = EState.e_LTurn;
                else if (this.state == EState.e_Jump)
                    this.state = EState.e_Jump;  // keep the jump state until it is cleared elsewhere
                else
                    this.state = EState.e_iddle;
                Quaternion rotation = Quaternion.Euler(Vector3.up * steerForce);
                this.rigidbody.MoveRotation(this.rigidbody.rotation * rotation);
            }
        }
    }

    // Integrates currentSpeed from input, clamps it to [minSpeed, maxSpeed],
    // adds the turbo boost, and applies the resulting forward force.
    void CalculateSpeed()
    {
        float fwdForce = Input.GetAxis("Forward") * acceleration;
        fwdForce = Mathf.Max(0, fwdForce);
        if (fwdForce == 0)
        {
            this.currentSpeed += minSpeed - decceleration;
            // While hooked to the moving platform, never drop below its speed.
            if (this.checkFilin())
                this.currentSpeed = Mathf.Max(this.currentSpeed, _speedPlateform);
        }
        else
            this.currentSpeed += minSpeed + fwdForce;
        if (this.currentSpeed < minSpeed)
            this.currentSpeed = minSpeed;
        if (this.currentSpeed > this.maxSpeed)
            this.currentSpeed = this.maxSpeed;
        this.currentSpeed += boostSpeed;
        if (this.collision)
            this.currentSpeed -= this.dragForce;
        this.rigidbody.AddForce(transform.forward * currentSpeed * 10, ForceMode.Force);
    }

    // Lock roll/pitch during contact and play the bump sound on player hits.
    void OnCollisionEnter(Collision other)
    {
        this.rigidbody.constraints = RigidbodyConstraints.FreezeRotationX | RigidbodyConstraints.FreezeRotationZ;
        if (other.gameObject.CompareTag("Player"))
            this.source.Play();
    }

    // Plays the side-scrape particles on the side facing the contact,
    // mirrored when the player travels in -z.
    void OnCollisionStay(Collision other)
    {
        if (other.contacts[0].normal.x < 0.0f)
        {
            if (this.transform.forward.z > 0.0f)
                this.fxManager.collisionLeft.particleSystem.Play();
            else
                this.fxManager.collisionRight.particleSystem.Play();
        }
        if (other.contacts[0].normal.x > 0.0f)
        {
            if (this.transform.forward.z > 0.0f)
                this.fxManager.collisionRight.particleSystem.Play();
            else
                this.fxManager.collisionLeft.particleSystem.Play();
        }
    }

    void OnCollisionExit(Collision other)
    {
        this.rigidbody.freezeRotation = true;
        this.fxManager.collisionLeft.particleSystem.Stop();
        this.fxManager.collisionRight.particleSystem.Stop();
    }

    // One-shot frontal impact FX on obstacles.
    void OnTriggerEnter(Collider other)
    {
        if (other.gameObject.CompareTag("Obstacle"))
        {
            this.fxManager.collisionFront.particleSystem.loop = false;
            this.fxManager.collisionFront.particleSystem.Play();
        }
    }

    void OnTriggerExit(Collider other)
    {
        this.fxManager.collisionFront.particleSystem.Stop();
    }

    // Rail-constrained steering entry point: converts horizontal input into
    // a heading change for this frame.
    void DirectionUpdate()
    {
        if (this.rail != null)
        {
            float change = this.rotateSpeedMax * Input.GetAxis("Horizontal") * Time.deltaTime;
            this.DirectionUpdate(change);
        }
    }

    // Applies |value| degrees of yaw, clamped to +/- angleRotation around the
    // rail's world heading (with wrap-around handling near 360°).
    void DirectionUpdate(float value)
    {
        float angleWorld = this.calculeAngleWorld();
        if (angleWorld < 0)
            angleWorld += 360;
        float diff = angleWorld - this.angleRotation;
        float tmpAngle;
        // Upside down (e.g. in a loop): invert the steering direction.
        if (transform.up.y < 0)
            value *= -1;
        this.curAngles = transform.eulerAngles;
        this.curAngles.y += value;
        if (diff < 0)
        {
            // The allowed window straddles 0°: shift, clamp, shift back.
            tmpAngle = this.curAngles.y - diff;
            if (tmpAngle >= 355) // to be checked for better precision
                tmpAngle -= 360;
            tmpAngle = Mathf.Clamp(tmpAngle, 0, this.angleRotation * 2);
            this.curAngles.y = tmpAngle + diff;
        }
        else
            this.curAngles.y = Mathf.Clamp(this.curAngles.y, angleWorld - this.angleRotation, this.angleRotation + angleWorld);
        transform.eulerAngles = this.curAngles;
    }

    // Returns the world-space heading (deg, signed) of the "PointToLook"
    // iTween path at the rail's current progress; 0 when there is no rail.
    float calculeAngleWorld()
    {
        Vector3 position;
        Vector3 end;
        float distance;
        float angle;
        if (this.rail == null)
            return 0f;
        distance = this.rail.GetComponent<PointToLook>().distanceParcouru;
        if (distance < 0)
            distance = 0;
        position = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), distance / this.rail.GetComponent<PointToLook>().distanceLevel);
        // Sample one unit further along the path (clamped near the end) to
        // build the forward direction.
        if (this.rail.GetComponent<PointToLook>().distanceLevel < distance + 1f)
            end = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), (this.rail.GetComponent<PointToLook>().distanceLevel - 1) / this.rail.GetComponent<PointToLook>().distanceLevel);
        else
            end = iTween.PointOnPath(iTweenPath.GetPath("PointToLook"), (distance + 1f) / this.rail.GetComponent<PointToLook>().distanceLevel);
        end = end - position;
        angle = Vector3.Angle(Vector3.forward.normalized, end.normalized);
        if (end.x > 0)
            return angle;
        return -angle;
    }

    // Reads the dash buttons and triggers a sideways impulse.
    void Dash()
    {
        if (Input.GetButtonDown("Dash Left"))
            this.MakeDash(-transform.right);
        if (Input.GetButtonDown("Dash Right"))
            this.MakeDash(transform.right);
    }

    // Called by Turbo each frame with the current turbo speed.
    public void setBoostSpeed(float value)
    {
        boostSpeed = value;
    }

    // Applies a dash impulse in |direction| if the gauge can afford it.
    private void MakeDash(Vector3 direction)
    {
        Jauge tmp = this.GetComponent<Jauge>();
        if (tmp != null && consoDash < tmp._jauge)
        {
            this.rigidbody.AddForce(direction * forceDash, ForceMode.Impulse);
            tmp.m_jauge = tmp._jauge - consoDash;
        }
    }

    // True when this player is hooked to a launched platform via the Filin;
    // also caches the platform speed into _speedPlateform.
    private bool checkFilin()
    {
        Filin fil = this.GetComponent<Filin>();
        movePlateform plateform;
        if (fil == null)
            return false;
        if (fil.getPlayer() == this.gameObject)
        {
            plateform = fil.m_target.GetComponent<movePlateform>();
            if (plateform == null || !plateform.isLaunch)
                return false;
            _speedPlateform = fil.m_target.GetComponent<movePlateform>().speedPlateforme;
            return true;
        }
        return false;
    }
}
<file_sep>/src/Sound/TriggerSound.cs
using UnityEngine;
using System.Collections;
// Plays a preloaded FMOD event whenever something enters this trigger.
public class TriggerSound : MonoBehaviour
{
    public string eventName;             // FMOD event to trigger
    public FmodEventAsset asset;         // bank containing the event
    private FmodEventAudioSource source;

    void Awake()
    {
        this.source = this.GetComponent<FmodEventAudioSource>();
        this.source.playOnAwake = false;
    }

    void Start()
    {
        bool loaded = Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source);
        if (!loaded)
            Debug.LogError("Can't load event: " + this.eventName);
    }

    void OnTriggerEnter()
    {
        this.source.Play();
    }
}
<file_sep>/src/GUI/MoveCadenas.cs
using UnityEngine;
using System.Collections;
// Animates a padlock sprite that fades in while shrinking onto the
// currently-selected locked character portrait.
public class MoveCadenas : MonoBehaviour
{
    public UISprite cadenas;            // the padlock sprite
    public float speedOpacite;          // fade-in speed (alpha per second)
    public float speedSize;             // shrink speed (pixels per second)
    private int realWidth;              // resting padlock width after the animation
    private int realHeight;             // resting padlock height (stored, never read back here)
    private bool changed = false;       // true while the snap animation is running
    private float currentSize;          // animated width, shrinks towards realWidth
    private UISprite currentCharacter;  // portrait the padlock is attached to (null = hidden)

    void Start()
    {
        this.cadenas.alpha = 0.0f;
        this.cadenas.width = Mathf.FloorToInt(Screen.width / 10);
        this.cadenas.leftAnchor.absolute = Screen.width >> 1;
        // NOTE(review): here right = left - width, whereas
        // UpdateCharacterLocked() uses right = left + width — looks
        // inconsistent; confirm which NGUI anchor layout is intended.
        this.cadenas.rightAnchor.absolute = this.cadenas.leftAnchor.absolute - this.cadenas.width;
        this.realHeight = this.cadenas.height;
        this.realWidth = this.cadenas.width;
    }

    // Attaches the padlock to |image| (enlarged 20%, transparent) and starts
    // the snap animation; null hides the padlock.
    public void UpdateCharacterLocked(UISprite image)
    {
        this.currentCharacter = image;
        if (this.currentCharacter == null)
        {
            this.cadenas.gameObject.SetActive(false);
        }
        else
        {
            this.cadenas.alpha = 0.0f;
            this.cadenas.width = Mathf.FloorToInt(this.cadenas.width * 1.2f);
            this.currentSize = this.cadenas.width;
            // Anchor the padlock to the portrait's bottom-right corner.
            this.cadenas.leftAnchor.absolute = this.currentCharacter.rightAnchor.absolute - this.cadenas.width;
            this.cadenas.rightAnchor.absolute = this.cadenas.leftAnchor.absolute + this.cadenas.width;
            this.cadenas.bottomAnchor.absolute = this.currentCharacter.bottomAnchor.absolute;
            this.cadenas.topAnchor.absolute = this.cadenas.bottomAnchor.absolute + this.cadenas.height;
            this.cadenas.gameObject.SetActive(true);
            this.changed = true;
        }
    }

    void Update()
    {
        if (this.changed == true)
        {
            if (this.AnimLock() == true)
            {
                this.changed = false;
            }
        }
    }

    // One animation step: fade the padlock in while shrinking it back to its
    // resting size, re-anchoring each frame. Returns true when finished.
    bool AnimLock()
    {
        bool ret = false;
        this.cadenas.alpha += this.speedOpacite * Time.deltaTime;
        if (this.cadenas.alpha > 1.0f)
            this.cadenas.alpha = 1.0f;
        this.currentSize -= this.speedSize * Time.deltaTime;
        this.cadenas.width = Mathf.FloorToInt(this.currentSize);
        this.cadenas.leftAnchor.absolute = this.currentCharacter.rightAnchor.absolute - this.cadenas.width;
        this.cadenas.rightAnchor.absolute = this.cadenas.leftAnchor.absolute + this.cadenas.width;
        this.cadenas.bottomAnchor.absolute = this.currentCharacter.bottomAnchor.absolute;
        this.cadenas.topAnchor.absolute = this.cadenas.bottomAnchor.absolute + this.cadenas.height;
        // Clamp to the resting width (applied after the anchors, matching the
        // original's statement order).
        if (this.cadenas.width < this.realWidth)
            this.cadenas.width = this.realWidth;
        if (this.cadenas.alpha >= 1.0f && this.cadenas.width == this.realWidth)
        {
            ret = true;
        }
        return ret;
    }
}<file_sep>/src/GUI/GUI_score.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Displays the local player's score as on-screen GUI text. Reads the score
/// from the networked GameLogic player when available, otherwise from the
/// locally tagged player when running without networking (noNetwork).
/// </summary>
public class GUI_score : Photon.MonoBehaviour {

    #region Public variables
    public GUIText m_score = null; // on-screen text element receiving the score
    #endregion

    #region Private Variables
    private GameLogic _gameLogic = null; // networked game logic, stays null offline
    #endregion

    // Initialize: fetch the GameLogic component from the "Network" object, if any.
    public void init()
    {
        GameObject lNetwork = GameObject.Find("Network");
        if (lNetwork)
            _gameLogic = lNetwork.GetComponent<GameLogic>();
    }

    // GUI: refresh the score label every GUI pass.
    void OnGUI()
    {
        if (_gameLogic)
        {
            if (_gameLogic.Player)
                DisplayScoreOf(_gameLogic.Player);
        }
        else
        {
            // Offline path. Cache the tag lookup and null-check each step: the
            // previous chained FindGameObjectWithTag(...).GetComponent<Controlleur>()
            // dereference threw a NullReferenceException whenever no tagged player
            // (or no Controlleur on it) existed yet.
            GameObject lPlayer = GameObject.FindGameObjectWithTag("Player");
            if (lPlayer)
            {
                Controlleur lControlleur = lPlayer.GetComponent<Controlleur>();
                if (lControlleur && lControlleur.noNetwork)
                    DisplayScoreOf(lPlayer);
            }
        }
    }

    // Writes the given player's Filin score (truncated to int) into m_score.
    private void DisplayScoreOf(GameObject pPlayer)
    {
        Filin lFilin = pPlayer.GetComponent<Filin>();
        if (lFilin && m_score)
            m_score.text = "" + (int)lFilin.m_score;
    }
}
<file_sep>/src/Camera/TriggerNear.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Adjusts the main camera's near clip plane when the local player crosses
/// this trigger, choosing a value based on which way they are facing
/// relative to the trigger.
/// </summary>
public class TriggerNear : MonoBehaviour
{
    public float nextNear; // near plane when entering facing away from the trigger
    public float lastNear; // near plane when entering facing the trigger (angle in (0, 90])

    private Camera cam;

    // Use this for initialization
    void Start()
    {
        cam = Camera.main;
    }

    void OnTriggerEnter(Collider col)
    {
        if (cam == null || !col.CompareTag("Player"))
            return;
        Controlleur cont = col.gameObject.GetComponent<Controlleur>();
        // Only react to the locally controlled player (owned view or offline mode).
        if (cont == null || (!cont.photonView.isMine && !cont.noNetwork))
            return;
        float angle = Vector3.Angle(col.transform.forward, this.transform.forward);
        cam.nearClipPlane = (angle > 0.0f && angle <= 90.0f) ? lastNear : nextNear;
    }
}
<file_sep>/src/Gameplay/CheckVisibility.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Tracks whether this renderer is on screen and keeps an FX object active
/// only while it is visible.
/// </summary>
public class CheckVisibility : MonoBehaviour
{
    public bool visible;    // true while the renderer is on screen
    public GameObject fx;   // effect shown only while visible

    void Start()
    {
        ApplyVisibility(false);
    }

    void OnBecameVisible()
    {
        ApplyVisibility(true);
    }

    void OnBecameInvisible()
    {
        ApplyVisibility(false);
    }

    // Single place that keeps the flag and the FX object in sync.
    private void ApplyVisibility(bool state)
    {
        visible = state;
        fx.SetActive(state);
    }
}
<file_sep>/src/Controlleur/Dash.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Handles the dash move: on a dash button press (and enough gauge), tracks
/// a fixed-length dash frame by frame and charges the gauge once it ends.
/// NOTE(review): the actual rigidbody displacement is commented out below,
/// so currently only the timing/gauge bookkeeping runs.
/// </summary>
public class Dash : MonoBehaviour {

    #region Private members
    private Jauge _jauge;                            // energy gauge paid per dash
    private bool[] _state = { false, false, false }; // requested direction: left / right / forward
    private bool _hasDashing;                        // true while a dash is in progress
    private float _traveledDist;                     // distance covered by the current dash
    #endregion

    #region Public members
    public float m_dashDistSpeed; // traveled distance per second
    public float m_dashDist;      // total length of one dash
    public float m_dashConso;     // gauge cost per dash
    #endregion

    // Use this for initialization
    void Start () {
        _jauge = GetComponent<Jauge>();
        _hasDashing = false;
    }

    // Update is called once per frame
    void Update () {
        if (!_hasDashing)
            setState();
        if (!_hasDashing)
            return;
        // Priority order when several buttons were pressed: left, right, forward.
        if (_state[0])
            dash(transform.TransformDirection(Vector3.left));
        else if (_state[1])
            dash(transform.TransformDirection(Vector3.right));
        else if (_state[2])
            dash(transform.TransformDirection(Vector3.forward));
    }

    // Advances the active dash by one frame along pDir; ends it and charges
    // the gauge once the full dash distance has been covered.
    private void dash(Vector3 pDir)
    {
        float lStep = m_dashDistSpeed * Time.deltaTime;
        _traveledDist += lStep;
        if (_traveledDist > m_dashDist)
            lStep -= _traveledDist - m_dashDist; // clip the final step to the dash length
        if (_traveledDist >= m_dashDist) {
            _hasDashing = false;
            _jauge.m_jauge -= m_dashConso;
            _traveledDist = 0.0f;
        }
        //GetComponent<Rigidbody>().MovePosition(transform.position + pDir * lStep);
    }

    // Samples the three dash buttons and arms a dash when one was pressed
    // and the gauge holds enough energy.
    private void setState() {
        _state[0] = Input.GetButtonDown("Dash Left");
        _state[1] = Input.GetButtonDown("Dash Right");
        _state[2] = Input.GetButtonDown("Dash Forward");
        if ((_state[0] || _state[1] || _state[2]) && _jauge.m_jauge >= m_dashConso)
            _hasDashing = true;
    }
}
<file_sep>/src/Gameplay/Consommable.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Reacts to trigger contacts for the player's board: "Boost" collectibles
/// refill the gauge with grab FX, "Obstacle" hits slow the board and play
/// the hit animation. The touched object is destroyed in both cases.
/// </summary>
public class Consommable : MonoBehaviour
{
    public float slowCoef;   // fraction of current speed lost on an obstacle hit
    public float bonusJauge; // gauge refill granted per collectible

    #region Private members
    private Jauge _jauge;
    private Controlleur _hoverboard;
    private AnimationController animController;
    private FXManager fxManager;
    #endregion

    // Use this for initialization
    void Start()
    {
        _jauge = GetComponent<Jauge>();
        _hoverboard = GetComponent<Controlleur>();
        animController = GetComponent<AnimationController>();
        fxManager = GetComponent<FXManager>();
    }

    void OnTriggerEnter(Collider pCol)
    {
        if (pCol.tag == "Boost")
        {
            // Ignore collectibles already flagged as consumed this frame.
            Destructible lBoost = pCol.gameObject.GetComponent<Destructible>();
            if (lBoost.destroyed == false)
            {
                fxManager.grabCollectiblePart1.particleSystem.Play();
                fxManager.grabCollectiblePart2.particleSystem.Play();
                _jauge.m_jauge += bonusJauge;
                lBoost.destroyed = true;
                Destroy(pCol.gameObject);
            }
        }
        else if (pCol.tag == "Obstacle")
        {
            animController.hit = true;
            _hoverboard.currentSpeed -= _hoverboard.currentSpeed * slowCoef;
            Destroy(pCol.gameObject);
        }
    }
}
<file_sep>/src/Plateforme/ElectricDischarge.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Drone hazard attached to a platform: after a random delay, it zaps the
/// player standing on the platform, cutting their tether and playing
/// discharge FX and audio. Shortly before firing, the drone's material
/// blinks as a warning. Timing is driven by the Photon master client via
/// RPCs, with a local fallback when running offline (Controlleur.noNetwork).
/// </summary>
public class ElectricDischarge : Photon.MonoBehaviour
{
    #region Public field
    public float m_MinTimer;        // minimum delay between discharges, in seconds
    public float m_MaxTimer;        // maximum delay between discharges, in seconds
    public float m_IndicCame;       // warning lead time: blink starts this many seconds before firing
    public Color m_IndicDechMat;    // blink color shown during the warning phase
    public GameObject m_DechFX;     // discharge FX prefab instantiated on zap
    public string eventName;        // FMOD event name for the discharge sound
    public FmodEventAsset asset;    // FMOD asset bank holding the event
    public FmodEventAudioSource source;
    public GameObject drone;        // drone model whose blinking material is animated
    #endregion

    #region Private field
    private float _time;            // elapsed time since the countdown (re)started
    private float _timeRand;        // randomly drawn delay for the next discharge
    private PlateformData _plateformData; // platform state; m_Player is the occupant, if any
    private Color _mainColor;       // original drone material color, restored after blinking
    #endregion

    #region Init
    void Awake()
    {
        this.source = this.GetComponent<FmodEventAudioSource>();
        // The discharge sound must only fire from Discharge(), never automatically.
        this.source.playOnAwake = false;
        if (Moteur.CreateEventInstance(this.asset.GetEventWithName(this.eventName), this.source) == false)
            Debug.LogError("Can't load event: " + this.eventName);
        // Material index 1 is the part of the drone that blinks -- assumed; TODO confirm model setup.
        _mainColor = drone.renderer.materials[1].color;
    }

    public void Start()
    {
        // Offline fallback: when the local player runs without networking,
        // initialise and arm the timer locally instead of waiting for RPCs.
        if (GameObject.FindGameObjectWithTag("Player"))
        {
            if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>())
            {
                if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>().noNetwork)
                {
                    Init();
                    ResetTime();
                    SetRand(Random.Range(m_MinTimer, m_MaxTimer));
                }
            }
        }
    }

    /// <summary>
    /// Networked initialisation: caches the platform data and, on the master
    /// client, broadcasts a fresh countdown and random delay to all clients.
    /// </summary>
    public void Init()
    {
        _plateformData = GetComponent<PlateformData>();
        if (PhotonNetwork.isMasterClient)
        {
            photonView.RPC("ResetTime", PhotonTargets.All);
            photonView.RPC("SetRand", PhotonTargets.All, Random.Range(m_MinTimer, m_MaxTimer));
        }
    }
    #endregion

    #region Update
    void Update()
    {
        // The countdown only runs while a player occupies the platform.
        if (_plateformData && _plateformData.m_Player)
        {
            // Master client advances the clock for everyone; the offline player
            // advances it locally. Other networked clients only receive RPCs.
            if (PhotonNetwork.isMasterClient)
                photonView.RPC("SetTime", PhotonTargets.All, _time + Time.deltaTime);
            else if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>().noNetwork)
                SetTime(_time + Time.deltaTime);
            if (_time >= _timeRand - m_IndicCame && _time < _timeRand)
            {
                // Warning phase: blink the drone material with a 0.2 s period.
                if (_time % 0.2f >= 0.1f)
                    drone.renderer.materials[1].color = m_IndicDechMat;
                else
                    drone.renderer.materials[1].color = _mainColor;
            }
            else if (_time > _timeRand)
            {
                // Delay elapsed: fire the discharge (broadcast when networked).
                if (PhotonNetwork.isMasterClient)
                    photonView.RPC("SendDischarge", PhotonTargets.All);
                else if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>().noNetwork)
                    Discharge();
            }
        }
        else
        {
            // Platform empty: keep rearming the timer so the countdown only
            // starts once a player steps on, and restore the drone color.
            if (PhotonNetwork.isMasterClient)
            {
                photonView.RPC("SetRand", PhotonTargets.All, Random.Range(m_MinTimer, m_MaxTimer));
                photonView.RPC("ResetTime", PhotonTargets.All);
            }
            else if (GameObject.FindGameObjectWithTag("Player"))
            {
                if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>())
                {
                    if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>().noNetwork)
                    {
                        SetRand(Random.Range(m_MinTimer, m_MaxTimer));
                        ResetTime();
                    }
                }
            }
            drone.renderer.materials[1].color = _mainColor;
        }
    }
    #endregion

    #region Reseau
    // Required by Photon observation; no state is synchronised through the
    // stream -- all synchronisation happens via the RPCs below.
    public void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info)
    {
    }

    // RPC: triggers the discharge on every client.
    [RPC]
    void SendDischarge()
    {
        Discharge();
    }

    // RPC: overwrites the local countdown clock.
    [RPC]
    void SetTime(float time)
    {
        _time = time;
    }

    // RPC: restarts the local countdown from zero.
    [RPC]
    void ResetTime()
    {
        _time = 0.0f;
    }

    // RPC: sets the delay before the next discharge.
    [RPC]
    void SetRand(float rand)
    {
        _timeRand = rand;
    }
    #endregion

    #region GameLogic
    /// <summary>
    /// Zaps the occupant: hit animation, discharge particles and sound,
    /// spawns the FX prefab, cuts the player's tether (Filin), then clears
    /// the occupant and rearms the timer for the next discharge.
    /// </summary>
    void Discharge()
    {
        _plateformData.m_Player.GetComponent<AnimationController>().hit = true;
        _plateformData.m_Player.GetComponent<FXManager>().discharge.particleSystem.Play();
        SetRand(Random.Range(m_MinTimer, m_MaxTimer));
        drone.renderer.materials[1].color = _mainColor;
        this.source.Play();
        Instantiate(m_DechFX, transform.position, Quaternion.identity);
        _plateformData.m_Player.GetComponent<Filin>()._filinState = eFilinState._CUT_;
        _plateformData.m_Player = null;
        if (PhotonNetwork.isMasterClient)
        {
            photonView.RPC("ResetTime", PhotonTargets.All);
            photonView.RPC("SetRand", PhotonTargets.All, Random.Range(m_MinTimer, m_MaxTimer));
        }
        else if (GameObject.FindGameObjectWithTag("Player").GetComponent<Controlleur>().noNetwork)
        {
            ResetTime();
            SetRand(Random.Range(m_MinTimer, m_MaxTimer));
        }
    }
    #endregion
}
<file_sep>/src/AudioClipManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Maps inspector-assigned clip names to AudioClips and plays them on
/// demand through the attached AudioSource.
/// </summary>
[RequireComponent(typeof(AudioSource))]
public class AudioClipManager : MonoBehaviour
{
    // Parallel lists edited in the inspector: names[i] labels clips[i].
    [SerializeField]
    public List<string> names = new List<string>();
    [SerializeField]
    public List<AudioClip> clips = new List<AudioClip>();

    private Dictionary<string, AudioClip> m_clips = new Dictionary<string, AudioClip>();
    private AudioSource source;

    private void Start()
    {
        source = GetComponent<AudioSource>();
        // Both lists must line up one-to-one; otherwise the mapping is ambiguous.
        if (names.Count != clips.Count)
        {
            Debug.LogError("AudioClipManager : list of names and clips not same length");
            return;
        }
        int count = clips.Count;
        for (int i = 0; i < count; ++i)
        {
            m_clips[names[i]] = clips[i];
            Debug.Log("Added to collection : " + names[i] + " for " + clips[i].ToString());
        }
    }

    /// <summary>
    /// Plays the named clip once; returns false when no clip is registered
    /// under that name.
    /// </summary>
    public bool PlayOnce(string clipName)
    {
        Debug.Log("Playing once : " + clipName);
        AudioClip clip;
        if (!m_clips.TryGetValue(clipName, out clip))
            return false;
        source.PlayOneShot(clip);
        return true;
    }
}
<file_sep>/src/GUI/FadingText.cs
using UnityEngine;
using System.Collections;
/// <summary>
/// Shows a message on an NGUI label at full opacity, then fades it out.
/// </summary>
public class FadingText : MonoBehaviour
{
    public UILabel label; // label whose text is displayed and faded

    private void Start()
    {
        // Start fully transparent until Set() is called.
        label.alpha = 0f;
    }

    /// <summary>
    /// Displays <paramref name="text"/> at full opacity and fades it to
    /// transparent over <paramref name="duration"/> seconds.
    /// </summary>
    public void Set(string text, float duration)
    {
        label.text = text;
        label.alpha = 1f;
        TweenAlpha.Begin(gameObject, duration, 0f);
    }
}
| 34b1d329bee56b4e055905f6fb9dff90c013cf5d | [
"Markdown",
"C#"
] | 52 | C# | Blackhart/unity3D-csharp-racingGame-Tracks | a5be0eec06853ac6668201ebc746eeba39e3f91b | 9b81e46d124ec5a8bac3d4c47633ab82ef48ea3e |
refs/heads/master | <repo_name>amkessler/nc09_special<file_sep>/00_load.R
library(tidyverse)
library(janitor)
library(fs)
library(lubridate)
#combine BOE 2018 precinct results into one ####
#set directory location
data_dir <- "raw_data/2018precincts"
#list files in directory
fs::dir_ls(data_dir)
#limit to csv files
csv_files <- fs::dir_ls(data_dir, regexp = "\\.csv$")
csv_files
#start with one file
test <- read_csv(csv_files[1], col_types = cols(.default = "c"))
#now we'll read in all the files and combine into one
combined2018 <- csv_files %>%
map_dfr(read_csv, col_types = cols(.default = "c"))
#save result
saveRDS(combined2018, "processed_data/combined2018_precincts.rds")
#load 2016 results #####
nc2016 <- read_tsv("raw_data/precinct_sort_statewide_at_large_contests_no_admin_precincts_20161108.txt")
nc2016 <- nc2016 %>%
clean_names()
#filter just for presidential race
nc2016_prez <- nc2016 %>%
filter(contest_name == "US PRESIDENT")
#remove NAs (under- and over-vote records)
nc2016_prez <- nc2016_prez %>%
filter(!is.na(choice_party))
#save result
saveRDS(nc2016_prez, "processed_data/nc2016_prez.rds")
### use NC BOE full txt file for 2018 ####
#load 2018 results
nc2018 <- read_tsv("raw_data/results_pct_20181106.txt")
nc2018 <- nc2018 %>%
clean_names()
#filter for NC-09 only
nc2018_house9 <- nc2018 %>%
filter(contest_name == "US HOUSE OF REPRESENTATIVES DISTRICT 09")
#save result
saveRDS(nc2018_house9, "processed_data/nc2018_house9.rds")
nc2018_house9 %>%
count(choice_party)
nc2018_house9 %>%
count(county)
# load 2019 special election results from BOE text file ####
nc2019 <- read_tsv("raw_data/results_pct_20190910.txt")
nc2019 <- nc2019 %>%
clean_names()
#filter for NC-09 only
nc2019_house9 <- nc2019 %>%
filter(contest_name == "US HOUSE OF REPRESENTATIVES DISTRICT 09")
#save result
saveRDS(nc2019_house9, "processed_data/nc2019_house9.rds")
nc2019_house9 %>%
count(choice_party)
<file_sep>/02_markdown_comparisons.Rmd
---
title: "NC-09 precinct-level comparisons"
author: "<NAME>"
date: "9/12/2019"
output: html_document
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
library(tidyverse)
library(janitor)
library(lubridate)
library(reshape2)
library(writexl)
library(kableExtra)
# library(gt)
options(scipen = 999)
joined <- readRDS("processed_data/joined.rds")
robeson <- joined %>%
filter(county == "ROBESON")
```
### Let's start out with turnout: How many precincts saw more votes for each party in the special compared with the original midterms in November. How many saw less?
Democrats: Nearly every precinct saw votes *fall* from November
```{r echo=FALSE}
joined %>%
count(dem_updown_votes) %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
The same was true for Republicans.
```{r echo=FALSE}
joined %>%
count(gop_updown_votes) %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
So we know this was a low-turnout election broadly and people who came out in their precincts in November didn't show up this time.
### Now let's look at each party's **percent** of the vote in each precinct
#### Did Tuesday's margins go up or down since Nov?
Democratic
```{r echo=FALSE}
joined %>%
count(dem_updown) %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
Republican
```{r echo=FALSE}
joined %>%
count(gop_updown) %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
So we can see Republicans did significantly better. Even though turnout overall was lower, the Republican voters showed up more often at the precinct level, allowing the party to boost its margins this time.
#### Where did Democratic percent of the vote fall from Nov?
Let's see which counties had the largest number of precincts with lower Democratic margins this time compared with November's midterms
```{r echo=FALSE}
joined %>%
count(dem_updown, county) %>%
filter(dem_updown == "down") %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
It's clear that Robeson County plays a key role in the Democratic struggles on Tues night. That much was known quite quickly, though let's dive in a little deeper now and see what the precincts tell us.
<!-- Where did Republican percent of the vote fall from Nov -->
<!-- ```{r echo=FALSE} -->
<!-- joined %>% -->
<!-- count(gop_updown, county) %>% -->
<!-- filter(gop_updown == "down") %>% -->
<!-- arrange(desc(n)) %>% -->
<!-- kable() %>% -->
<!-- kable_styling(bootstrap_options = c("striped", "condensed"), -->
<!-- full_width = F, -->
<!-- position = "left") -->
<!-- ``` -->
### Precincts that flipped parties on Tuesday
#### Let's see if Robeson features prominently or not among precincts that flipped from blue to red, or vice versa.
Total flipped precincts on Tues night show us there were 27 such precincts district-wide.
```{r echo=FALSE}
joined %>%
count(flip) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
Looking by party, it's clear Republicans owned the night for flips
```{r echo=FALSE}
joined %>%
filter(flip == "Y") %>%
count(flip, winner19) %>%
arrange(desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
So now we also know that Republicans were much more successful on Tuesday night in flipping precincts.
This matches up with our previous breakdowns above showing how Republicans gained greater shares of the vote in more precincts than Democrats.
Now we know they were *also better a flipping them altogether* and winning precincts Democrats had carried in November.
#### So which *counties* had most flips by party?
Republicans flipped a huge chunk (15!) in **Robeson County**
```{r echo=FALSE}
joined %>%
filter(flip == "Y") %>%
count(flip, winner19, county) %>%
arrange(desc(winner19), desc(n)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
#### Diving deeper into Robeson County: How the Dems lost ground and why it was so devastating for Tuesday's results
Democrats lost vote share in nearly every Robeson precinct compared with 2018.
The table below shows the number of precincts the Dem share went up or down in *just Robeson*.
```{r echo=FALSE}
robeson %>%
count(dem_updown) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
And in fact 33 is also the number of precincts Democrats won in 2018. This time? Not so much.
```{r echo=FALSE}
robeson %>%
count(winner18) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
robeson %>%
count(winner19) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
Here's what that looks like in terms of number of votes in those precincts. 2018 compared to 2019.
```{r echo=FALSE}
robeson %>%
group_by(winner18) %>%
summarise(n(), demvotes = sum(dem18), gopvotes = sum(gop18)) %>%
mutate(
difference = demvotes - gopvotes
) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
robeson %>%
group_by(winner19) %>%
summarise(n(), demvotes = sum(dem19), gopvotes = sum(gop19)) %>%
mutate(
difference = demvotes - gopvotes
) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
Democrats only had five precincts in Robeson with higher vote percentages than in November - and even those were low single digits.
By contrast, the losses were big and widespread. In 11 precincts, Democrats had double-digit losses in pct points compared with 2018 -- representing a loss of *more than 2,000 votes* since the midterms in just those precincts alone. (All but one of those precincts flipped from blue to red.)
```{r echo=FALSE}
robeson %>%
select(county, precinct, dem_change, dem_vote_change, flip) %>%
arrange(dem_change) %>%
head(11) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left") %>%
column_spec(3, bold = T)
```
Conversly, here are the biggest Republican wins in Robeson, and point growth from last time
```{r echo=FALSE}
robeson %>%
select(county, precinct, gop18pct, gop19pct) %>%
arrange(desc(gop19pct)) %>%
mutate(
point_boost = gop19pct - gop18pct
) %>%
head(10) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left") %>%
column_spec(4, bold = T)
```
There's even a more devastating way to look at Democratic performance in Robseon.
That is to look at the net loss of votes for Democrats on Tues vs. November -- in other words, precinct by precinct, what was the vote spread between Dem and Rep votes back in November...and then what was the vote spread on Tuesday.
When you look at the difference between those tallies, the net loss in Democratic advantage was staggering. Democrats may have "won" Robeson County on Tues, but they lost the spread difference by more than 4,000 votes.
```{r echo=FALSE}
robeson %>%
summarise(sum(votespread_GAINdiff_dem)) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left")
```
### Vote-Spread Metrics
Where did Democrats *lose* the most in the vote number difference compared with Nov?
It's all about Robeson County.
```{r echo=FALSE}
joined %>%
select(county, precinct, votespread_18_dem, votespread_19_dem, votespread_GAINdiff_dem) %>%
arrange(votespread_GAINdiff_dem) %>%
filter(votespread_19_dem < 0) %>%
head(10) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left") %>%
column_spec(5, bold = T)
```
In areas where Democrats gained votes from Nov, where did they *gain* the most in vote number spread?
```{r echo=FALSE}
joined %>%
select(county, precinct, votespread_18_dem, votespread_19_dem, votespread_GAINdiff_dem) %>%
arrange(desc(votespread_GAINdiff_dem)) %>%
filter(votespread_19_dem > 0) %>%
head(10) %>%
kable() %>%
kable_styling(bootstrap_options = c("striped", "condensed"),
full_width = F,
position = "left") %>%
column_spec(5, bold = T)
```
<file_sep>/README.md
# nc09_special
Analysis tied to NC 9th District special election in Sept 2019
<file_sep>/02_comparison.R
library(tidyverse)
library(janitor)
library(lubridate)
library(reshape2)
library(writexl)
library(kableExtra)
options(scipen = 999)
#load 2018 results from step 00
nc2018_house9 <- readRDS("processed_data/nc2018_house9.rds")
#remove precincts not marked as real
midterm_2018 <- nc2018_house9 %>%
filter(real_precinct == "Y")
names(midterm_2018)
midterm_2018 <- midterm_2018 %>%
select(county, precinct, choice_party, total_votes)
#reshape to get candidate votes going across
midterm_2018 <- midterm_2018 %>%
dcast(county + precinct ~ choice_party, value.var = "total_votes", sum) %>%
as_tibble()
#rename columns
midterm_2018 <- midterm_2018 %>%
rename(
dem18 = DEM,
lib18 = LIB,
gop18 = REP
)
#calculate total and percentages for each candidate
midterm_2018 <- midterm_2018 %>%
mutate(
total18 = (dem18 + lib18 + gop18),
dem18pct = round_half_up((dem18/total18)*100, 1),
gop18pct = round_half_up((gop18/total18)*100, 1),
margin18 = abs(dem18pct - gop18pct)
)
#save result to file
saveRDS(midterm_2018, "processed_data/midterm_2018.rds")
#load 2019 special election results from step 00 #### -------------------------------------
nc2019_house9 <- readRDS("processed_data/nc2019_house9.rds")
#remove precincts not marked as real
special_2019 <- nc2019_house9 %>%
filter(real_precinct == "Y")
names(special_2019)
special_2019 <- special_2019 %>%
select(county, precinct, choice_party, total_votes)
#reshape to get candidate votes going across
special_2019 <- special_2019 %>%
dcast(county + precinct ~ choice_party, value.var = "total_votes", sum) %>%
as_tibble()
#rename columns
special_2019 <- special_2019 %>%
rename(
dem19 = DEM,
lib19 = LIB,
gop19 = REP,
gre19 = GRE
)
#calculate total and percentages for each candidate
special_2019 <- special_2019 %>%
mutate(
total19 = (dem19 + lib19 + gop19 + gre19),
dem19pct = round_half_up((dem19/total19)*100, 1),
gop19pct = round_half_up((gop19/total19)*100, 1),
margin19 = abs(dem19pct - gop19pct)
)
#save result to file
saveRDS(special_2019, "processed_data/special_2019.rds")
### JOIN TOGETHER AND COMPARE #### ----------------------------------
joined_allcols <- inner_join(midterm_2018, special_2019)
joined_allcols
names(joined_allcols)
#select just the columns needed for the pct comparisons
joined <- joined_allcols %>%
select(
county,
precinct,
dem18,
gop18,
dem18pct,
gop18pct,
margin18,
dem19,
gop19,
dem19pct,
gop19pct,
margin19
)
#create derived columns to use for analysis
joined <- joined %>%
mutate(
winner18 = if_else(dem18pct > gop18pct, "D", "R"),
winner19 = if_else(dem19pct > gop19pct, "D", "R"),
flip = if_else(winner18 == winner19, "N", "Y"),
dem_change = dem19pct - dem18pct,
dem_vote_change = dem19 - dem18,
dem_updown = if_else(dem_change < 0, "down", "up"),
dem_updown = if_else(dem_change == 0, "no change", dem_updown),
dem_updown_votes = if_else(dem_vote_change < 0, "down", "up"),
dem_updown_votes = if_else(dem_vote_change == 0, "no change", dem_updown_votes),
gop_change = gop19pct - gop18pct,
gop_vote_change = gop19 - gop18,
gop_updown = if_else(gop_change < 0, "down", "up"),
gop_updown = if_else(gop_change == 0, "no change", gop_updown),
gop_updown_votes = if_else(gop_vote_change < 0, "down", "up"),
gop_updown_votes = if_else(gop_vote_change == 0, "no change", gop_updown_votes),
margin_tot_change = round_half_up(margin19 - margin18, 1),
vote_change_ratio_dem = round_half_up(dem_vote_change/gop_vote_change, 2),
vote_change_ratio_gop = round_half_up(gop_vote_change/dem_vote_change, 2)
)
#calculate vote spread for dems against gop in each election, then compared
joined <- joined %>%
mutate(
votespread_18_dem = dem18-gop18,
votespread_19_dem = dem19-gop19,
votespread_GAINdiff_dem = votespread_19_dem - votespread_18_dem
)
#save to file
saveRDS(joined, "processed_data/joined.rds")
write_xlsx(joined, "processed_data/joined.xlsx")
# ANALYSIS #### ----------------------------------------------
joined
#flips
joined %>%
count(flip)
joined %>%
filter(flip == "Y") %>%
count(flip, winner19)
joined %>%
filter(flip == "Y") %>%
count(flip, winner19, county) %>%
arrange(winner19, desc(n))
#up or down
joined %>%
count(dem_updown)
joined %>%
count(gop_updown)
# vote-spread analysis
joined %>%
arrange(desc(votespread_GAINdiff_dem)) %>%
View()
# this one shows the poles of Robeson and Meckelburg ******
joined %>%
arrange(desc(votespread_GAINdiff_dem)) %>%
filter(votespread_19_dem > 0) %>%
View()
#another view of Robeson at the pole
joined %>%
arrange(votespread_GAINdiff_dem) %>%
filter(votespread_19_dem < 0) %>%
View()
#which counties
joined %>%
count(county)
# ROBESON
robeson <- joined %>%
filter(county == "ROBESON")
#flips
robeson %>%
count(flip)
robeson %>%
filter(flip == "Y") %>%
count(flip, winner19)
#up or down
robeson %>%
count(dem_updown)
robeson %>%
count(gop_updown)
<file_sep>/01_analysis.R
library(tidyverse)
library(janitor)
library(fs)
library(lubridate)
#load precinct data from step 00
combined2018 <- readRDS("processed_data/combined2018_precincts.rds")
glimpse(combined2018)
combined2018$group_vote_ct_adj <- as.numeric(combined2018$group_vote_ct_adj)
combined2018$group_vote_ct_adj <- combined2018$group_vote_ct_adj - 1 #adjustment per documentation: see READ_ME file
#pull out only results for NC-09 congressional district
nc09_precincts18 <- combined2018 %>%
filter(contest_title == "US HOUSE OF REPRESENTATIVES DISTRICT 09")
#pull grand totals by candidate
nc09_precincts18 %>%
filter(!candidate_name %in% c("OVER VOTE", "UNDER VOTE")) %>%
group_by(candidate_name, candidate_party_lbl) %>%
summarise(cnt = n(), sum(group_vote_ct_adj))
#pull grand totals by candidate
nc09_grouped_precincts <- nc09_precincts18 %>%
filter(!candidate_name %in% c("OVER VOTE", "UNDER VOTE"),
county_desc %in% c("UNION",
"MECKLENBURG",
"ANSON",
"RICHMOND",
"SCOTLAD",
"ROBESON",
"BLADEN",
"CUMBERLAND")
) %>%
group_by(county_desc, precinct_code, precinct_name, candidate_name, candidate_party_lbl) %>%
summarise(cnt = n(), num_votes = sum(group_vote_ct_adj))
nc09_grouped_precincts
#save results to files
write_csv(nc09_grouped_precincts, "processed_data/nc09_2018_grouped_precincts.csv")
saveRDS(nc09_grouped_precincts, "processed_data/nc09_2018_grouped_precincts.rds")
#### 2016 PRESIDENTIAL ####
#load saved version from step 00
nc2016_prez <- readRDS("processed_data/nc2016_prez.rds")
#filter to NC-09 counties only
nc09_prez16 <- nc2016_prez %>%
filter(county%in% c("UNION",
"MECKLENBURG",
"ANSON",
"RICHMOND",
"SCOTLAD",
"ROBESON",
"BLADEN",
"CUMBERLAND"))
nc09_prez16
<file_sep>/03_mapping.R
# https://r-spatial.github.io/sf/articles/sf1.html
library(sf)
anson <- st_read("geo_data/anson.kml")
plot(anson[1])
anson
union <- st_read("geo_data/union.kml")
plot(union[1])
merged <- st_read("geo_data/mergedKML_NC09precincts.kml")
plot(merged)
plot(merged[3])
| e740b41558cf369d861ea744d27c530762f966e3 | [
"Markdown",
"R",
"RMarkdown"
] | 6 | R | amkessler/nc09_special | 4ea24113c0479d598c6fa26bcbee68e3a8f326b5 | 2012ea322db8abc0a88090822f292f367c597f17 |
refs/heads/master | <repo_name>joonasnordstrom/web-application-challenge<file_sep>/WebApplicationChallenge/WebApplicationChallengeTests/IntegrationTests/DiffAPITests.cs
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System;
using System.Text;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Models;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Domain.Services.Communication;
using VincitWebApplicationTests.Helpers;
using Xunit;
namespace VincitWebAppTests.IntegrationTests
{
public class DiffAPITests : IClassFixture<IntegrationTestFixture>
{
private readonly TestWebApplicationFactory<VincitWebApplication.Startup> _webApplicationFactory;
private readonly ISensorService _sensorService;
public DiffAPITests(IntegrationTestFixture integrationTestFixture)
{
_sensorService = integrationTestFixture.SensorService;
_webApplicationFactory = integrationTestFixture.WebApplicationFactory;
}
[Fact(DisplayName = "It should return sensor by specified ID")]
public async Task Get_Valid_Diff()
{
double difference = await _sensorService.GetDifferenceAsync("000D6F0003141E14");
var expectedResult = new DiffResponse("Success", difference);
var client = _webApplicationFactory.CreateClient();
var response = await client.GetAsync(@"https://localhost:44338/api/diff/000D6F0003141E14");
response.EnsureSuccessStatusCode();
Expect.Equal("application/json; charset=utf-8", response.Content.Headers.ContentType.ToString());
var responseString = await response.Content.ReadAsStringAsync();
Expect.DeepEqualLowerCaseFields(expectedResult, responseString);
}
[Fact(DisplayName = "It should return status code 404")]
public async Task Get_Invalid_Diff()
{
string id = "THIS_IS_NOT_A_VALID_ID_FOR_SURE";
var expectedResult = new NotFoundObjectResult(new DiffResponse(id)).Value;
var client = _webApplicationFactory.CreateClient();
var response = await client.GetAsync($"https://localhost:44338/api/diff/{id}");
Expect.Equal(System.Net.HttpStatusCode.NotFound, response.StatusCode);
Expect.Equal("application/json; charset=utf-8", response.Content.Headers.ContentType.ToString());
var responseString = await response.Content.ReadAsStringAsync();
// TODO someone is formatting my decimals
Expect.DeepEqualLowerCaseFields(expectedResult, responseString);
}
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallengeTests/Helpers/Expect.cs
using Newtonsoft.Json.Serialization;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;
namespace VincitWebApplicationTests.Helpers
{
public class Expect : Assert
{
    /// <summary>
    /// Asserts that two objects serialize to the same JSON,
    /// i.e. carry exactly the same attribute values.
    /// </summary>
    public static void DeepEqual(object expected, object actual) =>
        Equal(JsonConvert.SerializeObject(expected), JsonConvert.SerializeObject(actual));

    /// <summary>
    /// Asserts that the JSON serialization of <paramref name="expected"/>
    /// matches the raw string exactly.
    /// </summary>
    public static void DeepEqual(object expected, string actual) =>
        Equal(JsonConvert.SerializeObject(expected), actual);

    /// <summary>
    /// Asserts that the camelCase JSON serialization of
    /// <paramref name="expected"/> matches the raw string exactly.
    /// </summary>
    public static void DeepEqualLowerCaseFields(object expected, string actual)
    {
        var camelCase = new JsonSerializerSettings
        {
            ContractResolver = new CamelCasePropertyNamesContractResolver()
        };
        Equal(JsonConvert.SerializeObject(expected, camelCase), actual);
    }

    /// <summary>
    /// Asserts that a raw JSON string matches the serialization of
    /// <paramref name="actual"/> exactly.
    /// </summary>
    public static void DeepEqual(string expected, object actual) =>
        Equal(expected, JsonConvert.SerializeObject(actual));
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Services/SensorService.cs
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Models;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.DTOs;
using VincitWebApplication.Helpers;
namespace VincitWebApplication.Services
{
/// <summary>
/// Implementations for Business Logic layer of Sensor
/// </summary>
public class SensorService : ISensorService
{
    private readonly IRepositoryWrapper _repoWrapper;

    public SensorService(IRepositoryWrapper repoWrapper)
    {
        _repoWrapper = repoWrapper;
    }

    /// <summary>Returns the per-sensor summary (measurement count and average temperature).</summary>
    public async Task<List<SummaryDTO>> GetSummaryAsync() =>
        await _repoWrapper.Sensor.SummaryAsync();

    /// <summary>
    /// Returns the absolute difference (degrees Celsius) between the current
    /// outside temperature in Helsinki and the given sensor's latest reading.
    /// </summary>
    public async Task<double> GetDifferenceAsync(string sensorId)
    {
        var parser = new CurrentTempHTMLParser();
        double outsideTemp = await parser.GetCurrentTempAsync();
        CubesensorsDatum latest = await _repoWrapper.Sensor.FindLatestByIdAsync(sensorId);
        // Readings are stored as hundredths of a degree, hence the /100.
        double sensorTemp = ((double)latest.Temperature) / 100;
        return Math.Abs(outsideTemp - sensorTemp);
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Controllers/API/DiffController.cs
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Domain.Services.Communication;
namespace VincitWebApplication.Controllers
{
/// <summary>
/// API endpoints for api/diff
/// </summary>
[Route("api/diff")]
[ApiController]
public class DiffController : ControllerBase
{
    private readonly ISensorService _sensorService;

    public DiffController(ISensorService sensorService)
    {
        _sensorService = sensorService;
    }

    // GET: api/diff/5
    /// <summary>
    /// Returns the absolute difference between the latest reading of sensor
    /// <paramref name="id"/> and Helsinki's current outside temperature.
    /// </summary>
    [HttpGet("{id}")]
    [ProducesResponseType(StatusCodes.Status200OK)]
    [ProducesResponseType(StatusCodes.Status404NotFound)]
    [ProducesResponseType(StatusCodes.Status500InternalServerError)]
    public async Task<IActionResult> GetDifference(string id)
    {
        try
        {
            double diff = await _sensorService.GetDifferenceAsync(id);
            return Ok(new DiffResponse("Success", diff));
        }
        catch (InvalidOperationException)
        {
            // Raised when the sensor has no readings -> report 404.
            // TODO log exception
            return NotFound(new DiffResponse(id));
        }
        catch (Exception)
        {
            //TODO log exception
            return StatusCode(500, new DiffResponse("Internal server error"));
        }
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Services/Communication/DiffResponse.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VincitWebApplication.Domain.Services.Communication
{
/// <summary>
/// Client-side response for api/diff GET endpoint
/// </summary>
public class DiffResponse : BaseResponse
{
// Absolute temperature difference in degrees Celsius (0 on error responses).
public double Difference { get; set; }
private DiffResponse(bool success, string message, in double diff) : base(success, message)
{
Difference = diff;
}
/// <summary>
/// Creates a success response.
/// </summary>
/// <param name="message">Success message.</param>
/// <param name="diff">Absolute temperature difference to report.</param>
/// <returns>Response</returns>
public DiffResponse(string message, in double diff) : this(true, message, diff)
{ }
/// <summary>
/// Creates an error (sensor-not-found) response with Difference = 0.
/// </summary>
/// <param name="id">Id of the sensor that was not found.</param>
/// <returns>Response</returns>
public DiffResponse(string id) : this(false, $"Sensor not found. Id: {id}.", 0)
{ }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Persistence/Repositories/CubesensorsDatumRepository.cs
using Microsoft.EntityFrameworkCore;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Models;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.DTOs;
using VincitWebApplication.Persistence.Contexts;
namespace VincitWebApplication.Persistence.Repositories
{
/// <summary>
/// This repository handles all database queries for datatable cubesensors_data
/// </summary>
public class CubesensorsDatumRepository : BaseRepository<CubesensorsDatum>, ICubesensorsDatumRepository
{
    public CubesensorsDatumRepository(iot_dbContext context) : base(context) { }

    /// <summary>
    /// Get all sensors from database
    /// </summary>
    public async Task<List<CubesensorsDatum>> ListAsync()
    {
        return await base.FindAll()
            .ToListAsync();
    }

    /// <summary>
    /// Get summary of sensors from database. Temperatures are stored as
    /// hundredths of a degree, so the average is divided by 100.
    /// </summary>
    public async Task<List<SummaryDTO>> SummaryAsync()
    {
        var sensorSummary = await Context.CubesensorsData
            .GroupBy(sensor => sensor.SensorId)
            .Select(group => new SummaryDTO
            {
                SensorId = group.Key,
                Count = group.Count(),
                // BUG FIX: cast BEFORE dividing. The previous
                // (double)(Sum / Count) performed integer division first,
                // truncating the fractional part of the average.
                AvgTemp = (double)(group.Sum(i => i.Temperature) ?? 0L) / group.Count() / 100,
            })
            .ToListAsync();
        return sensorSummary;
    }

    /// <summary>
    /// Get latest measurement by SensorId. Throws InvalidOperationException
    /// when the sensor has no readings (mapped to 404 by DiffController).
    /// </summary>
    public async Task<CubesensorsDatum> FindLatestByIdAsync(string id)
    {
        // OrderByDescending + FirstAsync instead of OrderBy + LastAsync:
        // same result, but Last/LastAsync is not translated by all EF Core
        // providers, while First with a reversed ordering always is.
        return await base.FindByCondition(sensor => sensor.SensorId == id)
            .OrderByDescending(sensor => sensor.MeasurementTime)
            .FirstAsync();
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/DTOs/SummaryDTO.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VincitWebApplication.DTOs
{
/// <summary>
/// Client-side data transfer object (DTO) for database model CubesensorsDatum api/summary responses
/// </summary>
public class SummaryDTO
{
// Sensor identifier, e.g. "000D6F0003141E14".
public string SensorId { get; set; }
// Number of stored measurements for this sensor.
public int Count { get; set; }
// Average temperature in degrees Celsius (stored readings are hundredths of a degree).
public double AvgTemp { get; set; }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Repositories/IRepositoryWrapper.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VincitWebApplication.Domain.Repositories
{
/// <summary>
/// Aggregates all repositories and doubles as a unit of work.
/// </summary>
public interface IRepositoryWrapper
{
// Repository for the cubesensors_data table.
ICubesensorsDatumRepository Sensor { get; }
// Persists all pending changes to the database.
Task CompleteAsync();
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Persistence/Contexts/iot_dbContext.cs
using Microsoft.EntityFrameworkCore;
using VincitWebApplication.Domain.Models;
#nullable disable
namespace VincitWebApplication.Persistence.Contexts
{
/// <summary>
/// Database context (ORM) for handling database queries
/// </summary>
public class iot_dbContext : DbContext
{
public DbSet<CubesensorsDatum> CubesensorsData { get; set; }
public DbSet<OutsideTemperature> OutsideTemperatures { get; set; }
// Parameterless constructor, e.g. for design-time tooling.
public iot_dbContext()
{ }
public iot_dbContext(DbContextOptions<iot_dbContext> options)
: base(options)
{ }
// Intentionally empty: configuration is supplied via DbContextOptions.
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{ }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
// Maps the read-only cubesensors_data table; the table has no primary
// key, so the entity is keyless.
modelBuilder.Entity<CubesensorsDatum>(entity =>
{
entity.HasNoKey();
entity.ToTable("cubesensors_data");
entity.Property(e => e.Battery).HasColumnType("INT(10,0)");
entity.Property(e => e.Cable).HasColumnType("INT(10,0)");
entity.Property(e => e.Humidity).HasColumnType("INT(10,0)");
entity.Property(e => e.Light).HasColumnType("INT(10,0)");
entity.Property(e => e.MeasurementTime)
.IsRequired()
.HasColumnType("DATETIME2(7)");
entity.Property(e => e.Noise).HasColumnType("INT(10,0)");
entity.Property(e => e.Pressure).HasColumnType("INT(10,0)");
entity.Property(e => e.Rssi).HasColumnType("INT(10,0)");
entity.Property(e => e.SensorId)
.IsRequired()
.HasColumnType("NVARCHAR(20)");
entity.Property(e => e.Temperature).HasColumnType("INT(10,0)");
entity.Property(e => e.Voc).HasColumnType("INT(10,0)");
entity.Property(e => e.VocResistance).HasColumnType("INT(10,0)");
});
// Keyless mapping for the outside_temperature table.
modelBuilder.Entity<OutsideTemperature>(entity =>
{
entity.HasNoKey();
entity.ToTable("outside_temperature");
entity.Property(e => e.MeasurementTime).HasColumnType("DATETIME2");
entity.Property(e => e.Temperature).HasColumnType("NUMERIC");
});
}
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallengeTests/Helpers/TestWebApplicationFactory.cs
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using System;
using VincitWebApplication.Persistence.Contexts;
namespace VincitWebApplicationTests.Helpers
{
// https://docs.microsoft.com/en-us/aspnet/core/test/integration-tests?view=aspnetcore-2.2#customize-webapplicationfactory
/// <summary>
/// WebApplicationFactory that points the application at a Sqlite test
/// database whose connection string is read from the "SqliteConnection"
/// environment variable (set by IntegrationTestFixture).
/// </summary>
public class TestWebApplicationFactory<TStartup> : WebApplicationFactory<TStartup> where TStartup : class
{
    protected override void ConfigureWebHost(IWebHostBuilder builder)
    {
        builder.ConfigureServices(services =>
        {
            // FIX: removed an unused (and never-disposed) intermediate
            // ServiceCollection/provider that was built here for no effect.

            // Setup copy of original Sqlite database for testing
            var connectionString = Environment.GetEnvironmentVariable("SqliteConnection");
            services.AddEntityFrameworkSqlite().AddDbContext<iot_dbContext>(options =>
            {
                options.UseSqlite(connectionString);
            });

            // Build the service provider
            var sp = services.BuildServiceProvider();

            // Create a scope to obtain a reference to the database
            using (var scope = sp.CreateScope())
            {
                var scopedServices = scope.ServiceProvider;
                var db = scopedServices.GetRequiredService<iot_dbContext>();

                // Ensure the database is created
                db.Database.Migrate();
            }
        });
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Models/CubesensorsDatum.cs
using System.ComponentModel.DataAnnotations;
namespace VincitWebApplication.Domain.Models
{
/// <summary>
/// Database model for CubesensorsDatum
/// </summary>
public partial class CubesensorsDatum
{
public string SensorId { get; set; }
// NOTE(review): scaffolded as byte[] — presumably the Sqlite provider maps
// the DATETIME2 column to a blob; confirm before using it as a date.
public byte[] MeasurementTime { get; set; }
// Temperature in hundredths of a degree Celsius (see SummaryAsync / README).
public long? Temperature { get; set; }
public long? Pressure { get; set; }
public long? Humidity { get; set; }
public long? Voc { get; set; }
public long? Light { get; set; }
public long? Noise { get; set; }
public long? Battery { get; set; }
public long? Cable { get; set; }
public long? VocResistance { get; set; }
public long? Rssi { get; set; }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Services/Communication/BaseResponse.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VincitWebApplication.Domain.Services.Communication
{
/// <summary>
/// All responses should be derived from this class
/// </summary>
public abstract class BaseResponse
{
// True for success responses, false for error responses.
public bool Success { get; protected set; }
// Human-readable status or error message.
public string Message { get; protected set; }
/// <param name="success">Whether the operation succeeded.</param>
/// <param name="message">Status or error message.</param>
public BaseResponse(bool success, string message)
{
Success = success;
Message = message;
}
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallengeTests/IntegrationTests/SummaryAPITests.cs
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Domain.Services.Communication;
using VincitWebApplicationTests.Helpers;
using Xunit;
namespace VincitWebAppTests.IntegrationTests
{
public class SummaryAPITests : IClassFixture<IntegrationTestFixture>
{
    private readonly TestWebApplicationFactory<VincitWebApplication.Startup> _webApplicationFactory;
    private readonly ISensorService _sensorService;

    public SummaryAPITests(IntegrationTestFixture integrationTestFixture)
    {
        _sensorService = integrationTestFixture.SensorService;
        _webApplicationFactory = integrationTestFixture.WebApplicationFactory;
    }

    [Fact(DisplayName = "It should return list of sensors as a SummaryDTO")]
    public async Task Get_Sensors()
    {
        // The service layer computes the expected payload straight from the
        // test database; the HTTP response must serialize to the same JSON.
        var httpClient = _webApplicationFactory.CreateClient();
        var httpResponse = await httpClient.GetAsync("https://localhost:44338/api/summary");
        var expected = new SummaryResponse("Success", await _sensorService.GetSummaryAsync());

        httpResponse.EnsureSuccessStatusCode();
        Expect.Equal("application/json; charset=utf-8", httpResponse.Content.Headers.ContentType.ToString());
        var body = await httpResponse.Content.ReadAsStringAsync();
        Expect.DeepEqualLowerCaseFields(expected, body);
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Persistence/Repositories/RepositoryWrapper.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.Persistence.Contexts;
namespace VincitWebApplication.Persistence.Repositories
{
/// <summary>
/// All common repositories can be found here.
/// Also works as an unit of work, handling database updates.
/// </summary>
public class RepositoryWrapper : IRepositoryWrapper
{
    private readonly iot_dbContext _context;

    // FIX: backing field for the lazily created repository was accidentally
    // declared public; consumers should only use the Sensor property
    // (or the IRepositoryWrapper interface).
    private ICubesensorsDatumRepository _sensor;

    public ICubesensorsDatumRepository Sensor
    {
        get
        {
            if (_sensor == null)
            {
                _sensor = new CubesensorsDatumRepository(_context);
            }
            return _sensor;
        }
    }

    public RepositoryWrapper(iot_dbContext repositoryContext)
    {
        _context = repositoryContext;
    }

    /// <summary>Persists all pending changes to the database.</summary>
    public async Task CompleteAsync()
    {
        await _context.SaveChangesAsync();
    }
}
}
<file_sep>/README.md
# Web application exercise
This exercise's purpose is to create a backend service that returns sensor data from the database. Database `iot_db.sqlite` has around 250k rows of sensor data.
We may decide not to continue recruitment process based on this test, so it is recommended that you have good proficiency in the language you choose to use and also show it in this test.
### Getting started
1. Select technology you want to use.
* C, C++, C#, Java, JavaScript, PHP, Python, Rust, Scala, Clojure, or Go.
* If you want to use something else, ask first.
* You can use supporting libraries and frameworks.
2. Create your application.
### Functionality
Service should have these functionalities.
##### Sensor data summary
1. Get data from database table `cubesensors_data` as fast as possible.
2. Count amount of data per sensor and average temperature for each sensor.
3. Return data in JSON format.
Temperatures are stored as hundredths of degrees. For example, `1234` is `12.34` degrees Celsius.
```
{
"sensors" : [
{ "sensorId" : "000A1F0003141E11", "count" : 500, "avgTemp" : 21.4 },
{ "sensorId" : "000B2F0003141E22", "count" : 20, "avgTemp" : 19.7 }
]
}
```
##### Temperature difference
1. Get selected sensor's latest temperature.
2. Get Helsinki's current temperature from http://wttr.in/Helsinki. Temperature is in the first element that ends with ```</span> °C``` e.g. ```<span class="xxxx">-1</span> °C```.
3. Calculate the difference of these two values.
4. Return data in JSON format.
```
{
"difference" : 14.56
}
```
### Architecture and design
1. Implement interface so `htmlapp/index.html` can get data from the service. There should be no need to modify the app.
2. Design a good architecture that will support different application scenarios.
3. Write understandable code.
Other applications could also use this service in the future. For example
* a C++/C#/Java/JavaScript/Python/Go/... client, or
* another application as an e.g. Maven dependency, Node module, Python package, class library or other module.
### Important
Anyone can write or copy-paste a basic small app, so remember to show your design and architecture skills.
### Tips
* You can comment what you would do.
* Be ready to explain things in the interview.
* Testing and mocking should be done as if this would be production code.
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Models/OutsideTemperature.cs
using System;
using System.Collections.Generic;
#nullable disable
namespace VincitWebApplication.Domain.Models
{
/// <summary>
/// Database model for OutsideTemperature
/// </summary>
public partial class OutsideTemperature
{
// NOTE(review): both columns were scaffolded as byte[] — presumably the
// Sqlite provider maps DATETIME2/NUMERIC to blobs; confirm before parsing.
public byte[] MeasurementTime { get; set; }
public byte[] Temperature { get; set; }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Repositories/ICubesensorsDatumRepository.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using VincitWebApplication.Domain.Models;
using VincitWebApplication.DTOs;
namespace VincitWebApplication.Domain.Repositories
{
/// <summary>
/// Data-access contract for the cubesensors_data table.
/// </summary>
public interface ICubesensorsDatumRepository : IBaseRepository<CubesensorsDatum>
{
// Per-sensor measurement count and average temperature.
Task<List<SummaryDTO>> SummaryAsync();
// Latest measurement for the given sensor id.
Task<CubesensorsDatum> FindLatestByIdAsync(string id);
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Extensions/ServiceExtensions.cs
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Persistence.Contexts;
using VincitWebApplication.Persistence.Repositories;
namespace VincitWebApplication.Extensions
{
/// <summary>
/// Extension methods for IServiceCollection
/// </summary>
public static class ServiceExtensions
{
/// <summary>Registers the repository layer with scoped lifetime.</summary>
public static void ConfigureRepositories(this IServiceCollection services)
{
services.AddScoped<ICubesensorsDatumRepository, CubesensorsDatumRepository>();
services.AddScoped<IRepositoryWrapper, RepositoryWrapper>();
}
/// <summary>
/// Registers the Sqlite DbContext using the "SqliteConnection"
/// connection string from configuration.
/// </summary>
public static void ConfigureSqliteContext(this IServiceCollection services, IConfiguration configuration)
{
var connectionString = configuration.GetConnectionString("SqliteConnection");
services.AddDbContext<iot_dbContext>(options => options.UseSqlite(connectionString));
}
// Just in case for possible future front-end framework related CORS issues
/// <summary>Registers a permissive CORS policy named "CorsPolicy".</summary>
public static void ConfigureCors(this IServiceCollection services)
{
services.AddCors(options =>
{
options.AddPolicy("CorsPolicy",
builder => builder.AllowAnyOrigin()
.AllowAnyMethod()
.AllowAnyHeader());
});
}
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Helpers/CurrentTempHTMLParser.cs
using HtmlAgilityPack;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VincitWebApplication.Helpers
{
/// <summary>
/// Handles retrieving current temperature in Helsinki from "http://wttr.in/Helsinki"
/// </summary>
public class CurrentTempHTMLParser
{
    private string Url => "http://wttr.in/Helsinki";

    // FIX: single instance instead of an expression-bodied property that
    // allocated a fresh HtmlWeb on every access.
    private readonly HtmlWeb _html = new HtmlWeb();

    public CurrentTempHTMLParser()
    { }

    /// <summary>
    /// Returns current temperature in Helsinki (degrees Celsius).
    /// Throws ArgumentNullException when the page structure changed and
    /// InvalidCastException when the value is not numeric; both surface as
    /// HTTP 500 via the controllers' generic catch.
    /// </summary>
    public async Task<double> GetCurrentTempAsync()
    {
        HtmlDocument doc = await _html.LoadFromWebAsync(Url);
        // First <span> whose following text node contains "°C".
        HtmlNode response = doc.DocumentNode.SelectSingleNode("(//span/following-sibling::text()[contains(., '°C')])[1]/preceding-sibling::span[1]");
        // FIX: use the (paramName, message) overload — the single-string
        // ArgumentNullException constructor treats its argument as a
        // parameter NAME, so the intended error text was misplaced.
        if (response == null) throw new ArgumentNullException(nameof(response), "HTML structure has changed.");
        // NOTE(review): TryParse is culture-sensitive; wttr.in appears to
        // return plain integers, so this is fine today — confirm if the
        // format ever gains decimal separators.
        if (!double.TryParse(response.InnerText, out double currentTemp)) throw new InvalidCastException("HTML Data changed");
        return currentTemp;
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallengeTests/Helpers/IntegrationTestFixture.cs
using Microsoft.EntityFrameworkCore;
using System;
using VincitWebApplication.Domain.Repositories;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Persistence.Contexts;
using VincitWebApplication.Persistence.Repositories;
using VincitWebApplication.Services;
namespace VincitWebApplicationTests.Helpers
{
/// <summary>
/// Shared xUnit fixture: wires up the test web host, a Sqlite-backed
/// DbContext, and a SensorService over the same test database.
/// </summary>
public class IntegrationTestFixture : IDisposable
{
    public readonly TestWebApplicationFactory<VincitWebApplication.Startup> WebApplicationFactory;
    private readonly iot_dbContext _context;
    private readonly IRepositoryWrapper _repositoryWrapper;
    public readonly ISensorService SensorService;

    const string PATH_TO_TEST_DB = "..\\..\\..\\iot_dbTest.sqlite";
    public string CONNECTION_STRING => $"Data Source={PATH_TO_TEST_DB};";

    public IntegrationTestFixture()
    {
        WebApplicationFactory = new TestWebApplicationFactory<VincitWebApplication.Startup>();
        // TestWebApplicationFactory reads this variable to configure its DbContext.
        Environment.SetEnvironmentVariable("SqliteConnection", CONNECTION_STRING);
        var contextOptions = new DbContextOptionsBuilder<iot_dbContext>()
            .UseSqlite(CONNECTION_STRING)
            .Options;
        _context = new iot_dbContext(contextOptions);
        _repositoryWrapper = new RepositoryWrapper(_context);
        SensorService = new SensorService(_repositoryWrapper);
    }

    public void Dispose()
    {
        // FIX: was empty — release the DbContext and the in-memory test host.
        _context.Dispose();
        WebApplicationFactory.Dispose();
    }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Services/Communication/SummaryResponse.cs
using System;
using System.Collections.Generic;
using VincitWebApplication.DTOs;
namespace VincitWebApplication.Domain.Services.Communication
{
/// <summary>
/// Client-side response for CubesensorsDatum
/// </summary>
public class SummaryResponse : BaseResponse
{
    public List<SummaryDTO> Sensors { get; set; }

    private SummaryResponse(bool success, string message, in List<SummaryDTO> sensors) : base(success, message)
    {
        Sensors = sensors;
    }

    /// <summary>
    /// Creates a success response.
    /// </summary>
    /// <param name="message">Success message.</param>
    /// <param name="sensors">Summary rows for the payload.</param>
    /// <returns>Response</returns>
    // BUG FIX: this constructor previously passed `false`, so every
    // successful api/summary response reported Success = false.
    // DiffResponse passes `true` in the equivalent constructor.
    public SummaryResponse(string message, in List<SummaryDTO> sensors) : this(true, message, sensors)
    { }

    /// <summary>
    /// Creates an error response (Sensors is null).
    /// </summary>
    /// <param name="message">Error message.</param>
    /// <returns>Response</returns>
    public SummaryResponse(string message) : this(false, message, null)
    { }
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Domain/Services/ISensorService.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using VincitWebApplication.DTOs;
namespace VincitWebApplication.Domain.Services
{
/// <summary>
/// Interface for Business Logic layer of Sensor
/// </summary>
public interface ISensorService
{
// Per-sensor measurement count and average temperature.
Task<List<SummaryDTO>> GetSummaryAsync();
// Absolute difference between the sensor's latest reading and
// Helsinki's current outside temperature.
Task<double> GetDifferenceAsync(string sensorId);
}
}
<file_sep>/WebApplicationChallenge/WebApplicationChallenge/Controllers/API/SummaryController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using VincitWebApplication.Domain.Services;
using VincitWebApplication.Domain.Services.Communication;
using VincitWebApplication.DTOs;
namespace VincitWebApplication.Controllers
{
/// <summary>
/// API endpoints for api/summary
/// </summary>
[Route("api/summary")]
[ApiController]
public class SummaryController : ControllerBase
{
    private readonly ISensorService _sensorService;

    public SummaryController(ISensorService sensorService)
    {
        _sensorService = sensorService;
    }

    // GET: api/summary
    /// <summary>
    /// Returns the per-sensor measurement count and average temperature.
    /// </summary>
    [HttpGet]
    // FIX: the action actually returns a SummaryResponse (which wraps
    // List<SummaryDTO>), so document that type instead of the bare list.
    [ProducesResponseType(typeof(SummaryResponse), StatusCodes.Status200OK)]
    [ProducesResponseType(typeof(SummaryResponse), StatusCodes.Status500InternalServerError)]
    public async Task<IActionResult> GetSummary()
    {
        try
        {
            var sensors = await _sensorService.GetSummaryAsync();
            return Ok(new SummaryResponse("Success", sensors));
        }
        catch (Exception)
        {
            //TODO log exception
            // FIX: wrap the error in a SummaryResponse (using its error
            // constructor) for consistency with DiffController, instead of
            // returning a bare string body.
            return StatusCode(500, new SummaryResponse("Internal server error"));
        }
    }
}
}
| 802b0aa05abdb6eaa849a8d9d46d83548605bf98 | [
"Markdown",
"C#"
] | 23 | C# | joonasnordstrom/web-application-challenge | ffd8467817bcee78ebe133fa8bf9d092566079d8 | 942be31872fb96927fd1ca65d3e622b5bcee712d |
refs/heads/master | <repo_name>sinsys/state-drills<file_sep>/src/components/HelloWorld/HelloWorld.js
import React from 'react';
import './HelloWorld.css';
class HelloWorld extends React.Component {
static defaultProps = {
who: "World",
options: ["World", "friend", "React", "test", "stuff", "blah"]
}
constructor(props){
super(props);
this.state = {
who: this.props.who
}
}
updateText = (option) => {
this.setState ({
who: option
})
}
render() {
return (
<div className="HelloWorld-wrapper">
<h2>Hello World + Interactivity</h2>
<p>Hello, {this.state.who}!</p>
{this.props.options.map((option) => (
<button
key={option}
onClick={() => this.updateText(option)}>
{option}
</button>
))}
</div>
)
}
}
export default HelloWorld;<file_sep>/src/components/Bomb/Bomb.js
import React from 'react';
import './Bomb.css';
class Bomb extends React.Component {
static defaultProps = {
timerStrings: ["tick", "tock"],
counter: 8
};
constructor(props){
super(props);
this.state = {
counter: this.props.counter
}
}
componentDidMount() {
this.startBomb();
}
componentWillUnmount() {
clearInterval(this.interval);
}
displayText() {
const {counter} = this.state;
if(counter <= 0) {
clearInterval(this.interval);
return "BOOM!!!!!";
}
if(counter % 2 === 0){
return this.props.timerStrings[0];
} else if(counter % 2 === 1){
return this.props.timerStrings[1];
}
}
restartBomb() {
if(this.interval){
clearInterval(this.interval);
}
this.setState({
counter: 8
})
this.startBomb();
}
startBomb() {
this.interval = setInterval(() => {
this.setState({
counter: this.state.counter -1
})
}, 1000)
}
render() {
return (
<div>
<h2>Da Bomb</h2>
<p>{this.state.counter}</p>
<p>{this.displayText()}</p>
<button
onClick={() => this.restartBomb()}>
Restart Bomb
</button>
</div>
)
}
}
export default Bomb;<file_sep>/src/components/Accordion/Accordion.js
import React from 'react';
import './Accordion.css';
// Accordion: renders a list of titled sections; clicking a section's button
// reveals its content. Only one section is open at a time.
class Accordion extends React.Component {
static defaultProps = {
sections: []
};
constructor(props) {
super(props);
this.state = {
// Index of the currently open section; null when all are closed.
activeSection: null
};
};
// Renders one section; content is shown only when its index is active.
renderSection(section, id, activeSection) {
return (
<li
className="Accordion-section"
key={id}
>
<button
type="button"
onClick={() => {
this.handleSectionSelect(id)
}}
>
{section.title}
</button>
{(activeSection === id) &&
<p>
{section.content}
</p>
}
</li>
)
}
// Marks the clicked section as the active (open) one.
handleSectionSelect = (index) => {
this.setState({
activeSection: index
})
};
render() {
const { activeSection } = this.state;
const { sections } = this.props;
return (
<div className="Accordion-wrapper">
<h2>Accordion</h2>
<ul className="Accordion">
{sections.map((section, id) => (
this.renderSection(section, id, activeSection)
))}
</ul>
</div>
)
};
}
export default Accordion;<file_sep>/README.md
# React State Drills
This is a collection of state drills for reference. It includes things like updating state, using props with state side by side, some `Math ` and `Date` functions, among others as a Kitchen Sink.
## Available Components
- `TheDate` - This is a running clock that updates real-time based on 1 second update intervals.
- `Counter` - This is a very basic counting button component that updates the state by an interval set by props.
- `HelloWorld` - Expansion of the prior component. Allows you to update state by using the text of the selected button.
- `Bomb` - Uses a countdown timer to explode a bomb and updates state to demo toggling between two strings until a condition is met. A restart button is included.
- `RouletteGun` - A play on Russian Roulette. Select the button to randomly select an integer. If that integer is identical to the loaded chamber, you lose.
- `Tabs` - This is a basic tabs component to render different info based on the selected tab.
- `Accordion` - This is a basic accordion component that expands/collapses info based on the selected item.
- `ShoppingList` - This is a component consisting of 2 other components with functionality to add/remove content from lists. Also adds individual toggles for purchase, universal deletion of items. (**save for later on backlog**)
___
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `npm start`
Runs the app in the development mode.<br />
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
The page will reload if you make edits.<br />
You will also see any lint errors in the console.
### `npm test`
Launches the test runner in the interactive watch mode.<br />
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `npm run build`
Builds the app for production to the `build` folder.<br />
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.<br />
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `npm run eject`
**Note: this is a one-way operation. Once you `eject`, you can’t go back!**
If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (Webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).
### Code Splitting
This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting
### Analyzing the Bundle Size
This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size
### Making a Progressive Web App
This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app
### Advanced Configuration
This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration
### Deployment
This section has moved here: https://facebook.github.io/create-react-app/docs/deployment
### `npm run build` fails to minify
This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify
<file_sep>/src/App.js
import React from 'react';
// import logo from './logo.svg';
import './App.css';
import TheDate from './components/TheDate/TheDate';
import Counter from './components/Counter/Counter';
import HelloWorld from './components/HelloWorld/HelloWorld';
import Bomb from './components/Bomb/Bomb';
import RouletteGun from './components/RouletteGun/RouletteGun';
import Tabs from './components/Tabs/Tabs';
import Accordion from './components/Accordion/Accordion';
import ShoppingList from './components/ShoppingList/ShoppingList';
// Fixture data for the Tabs component demo.
const tabsProp =
[
{ name: 'First tab',
content: 'Lorem ipsum dolor sit amet consectetur adipisicing elit. Laboriosam exercitationem quos consectetur expedita consequatur. Fugit, sapiente aspernatur corporis velit, dolor eum reprehenderit provident ipsam, maiores incidunt repellat! Facilis, neque doloremque.' },
{ name: 'Second tab',
content: 'Laboriosam exercitationem quos consectetur expedita consequatur. Fugit, sapiente aspernatur corporis velit, dolor eum reprehenderit provident ipsam, maiores incidunt repellat! Facilis, neque doloremque. Lorem ipsum dolor sit amet consectetur adipisicing elit.' },
{ name: 'Third tab',
content: 'Fugit, sapiente aspernatur corporis velit, dolor eum reprehenderit provident ipsam, maiores incidunt repellat! Facilis, neque doloremque. Lorem ipsum dolor sit amet consectetur adipisicing elit. Laboriosam exercitationem quos consectetur expedita consequatur.' }
];
// Fixture data for the Accordion component demo.
const sectionsProp = [
{
title: 'Section 1',
content: 'Lorem ipsum dolor sit amet consectetur adipisicing elit.',
},
{
title: 'Section 2',
content: 'Cupiditate tenetur aliquam necessitatibus id distinctio quas nihil ipsam nisi modi!',
},
{
title: 'Section 3',
content: 'Animi amet cumque sint cupiditate officia ab voluptatibus libero optio et?',
},
];
// Root component of the demo app: composes all the exercise widgets.
// Tabs and Accordion receive the static fixture arrays defined above;
// Counter is seeded with count=123 and step=5. The remaining children
// take no props.
class App extends React.Component {
  render() {
    return (
      <div className="App">
        <TheDate />
        <Counter
          count={123}
          step={5}
        />
        <HelloWorld />
        <Bomb />
        <RouletteGun />
        <Tabs
          tabs={tabsProp}
        />
        <Accordion
          sections={sectionsProp}
        />
        <ShoppingList />
      </div>
    )
  }
}
export default App;
<file_sep>/src/components/ShoppingList/Card/Card.test.js
import React from 'react';
import ReactDOM from 'react-dom';
import Card from './Card';
import renderer from 'react-test-renderer';
import STORE from '../STORE.js';
describe('Card component', () => {
  // Use card 'a' from the shared STORE fixture as representative props.
  // NOTE(review): wrapping STORE in an object literal and immediately reading
  // it back (`store.STORE`) is redundant — `STORE.allCards['a']` would do.
  const store = {STORE};
  const props = store.STORE.allCards['a'];
  // Smoke test: the component mounts and unmounts without throwing.
  it('Renders without crashing', () => {
    const div = document.createElement('div');
    ReactDOM.render(
      <Card
        id={props.id}
        title={props.title}
        content={props.content}
      />, div);
    ReactDOM.unmountComponentAtNode(div);
  });
  // Snapshot test: the rendered tree matches the stored snapshot.
  it('renders the UI as expected', () => {
    const tree = renderer
      .create(<Card
        id={props.id}
        title={props.title}
        content={props.content}
        onClickDelete={props.onClickDelete}
        purchased={props.purchased}
      />)
      .toJSON();
    expect(tree).toMatchSnapshot();
  });
})<file_sep>/src/components/ShoppingList/List/List.js
import React from 'react';
import Card from '../Card/Card';
import './List.css';
class List extends React.Component {
constructor(props){
super(props);
this.state = {
list: this.props
}
}
render() {
console.log(this.props);
return (
<section className="List">
<header className="List-header">
<h2>{this.props.title}</h2>
</header>
<div className="List-cards">
{this.props.cards.map((card, index) => {
if(!card.saved){
return (
<Card
key={this.props.title + "-id-" + card.id + "-index-" + index}
id={card.id}
title={card.title}
content={card.content}
purchased={false}
onClickDelete={this.props.onClickDelete}
onClickSavedCard={this.props.onClickSavedCard}
/>
)
} else {
return (null);
}
})}
<button
type="button"
className="List-add-button"
onClick={() => this.props.onClickAdd(this.props.id)}
>
+ Add Card
</button>
</div>
</section>
)
}
}
List.defaultProps = {
onClickAdd: () => {},
onClickSavedCard: () => {}
}
export default List; | 76ede309ac4acee7fa3f68df2f0e5ea99d5a3242 | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | sinsys/state-drills | bea9a9a38d29df3aaf3edcd0d59edd1ee7695b5d | 71dc95445944bb9b79398b46ff58e436f4132270 |
refs/heads/master | <file_sep># Kotlin_FirstApplication
This is the first application made with the Kotlin programming language.
<file_sep>package com.example.chapter02
import kotlinx.coroutines.*
// Demonstrates coroutine basics: launch vs. async, await, explicit
// cancellation, and withTimeoutOrNull.
fun main(){
    // NOTE(review): `scope` is never used afterwards; GlobalScope is only
    // referenced here.
    val scope = GlobalScope
    runBlocking{
        // Background job printing 1..5 with a 10 ms pause between values.
        val a = launch{
            for(i in 1..5){
                println(i)
                delay(10)
            }
        }
        // Deferred that completes immediately with a result string.
        val b = async {
            "async 종료"  // literal kept as-is ("async finished")
        }
        println("async 대기")
        println(b.await())
        println("launch 취소")
        // Cancel the printing job; it may or may not have finished by now.
        a.cancel()
        println("launch 종료")
    }
    runBlocking {
        // withTimeoutOrNull yields null when the 50 ms budget elapses before
        // the block completes; 10 prints at 10 ms each will exceed it, so
        // "Finish" is never returned here.
        var result = withTimeoutOrNull(50){
            for(i in 1..10){
                println(i)
                delay(10)
            }
            "Finish"
        }
        println(result)
    }
}
| 7ab2fdbb5ead97736b1ba9f30180f08ec08df872 | [
"Markdown",
"Kotlin"
] | 2 | Markdown | GeonwooVincentKim/Kotlin_FirstApplication | f59f1f9b7e28c54377a926ec2f08d3cacd6dc4e6 | 6a1ccb1c6e48ea31a374f3bf75e3533d8e414363 |
refs/heads/main | <repo_name>DincerDogan/News-Application<file_sep>/README.md
# News-Application
## Çalıştırmak için
- yarn install
- expo start
<file_sep>/config/config.js
// NewsAPI access configuration.
// Replace the placeholder below with your own key ("sizin-apiniz" is Turkish
// for "your API key").
export const API_KEY = `sizin-apiniz`;
// Top-headlines endpoint of NewsAPI v2.
export const endpoint = `https://newsapi.org/v2/top-headlines`;
// Country code for the headlines feed (tr = Turkey).
export const country = 'tr';
// Default news category requested from the API.
export const category='general';
| d6d4e20448c21a8025ec83df1f993f2491dee3f6 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | DincerDogan/News-Application | 4238496974cc3f486995d7167e1b7f2e877f7364 | 268ebcc52ebde35e2568aa7d576ebc42ace63459 |
refs/heads/master | <file_sep>package com.phd3.onesecond;
import java.io.File;
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Semaphore;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.MediaController;
import android.widget.Toast;
import android.widget.VideoView;
/**
 * Activity that records a single short (~1 s) video clip to
 * /sdcard/JustASecond/ and then launches a player for it.
 *
 * Flow: the capture button starts recording, a Timer releases a semaphore
 * after 1.3 s, and the click handler then plays the clip, stops recording,
 * and finishes the activity.
 *
 * NOTE(review): `lock.acquire()` in onClick blocks the UI thread for ~1.3 s
 * (ANR risk), and `playVideo()` is invoked before `stopRecording()` — both
 * look unintentional but are preserved here; confirm before changing.
 */
public class AndroidVideoCapture extends Activity {
	private Camera myCamera;                      // camera used for preview/recording
	private MyCameraSurfaceView myCameraSurfaceView;
	private MediaRecorder mediaRecorder;
	private String filename = "";                 // absolute path of the last clip
	ImageButton myButton;                         // capture button
	SurfaceHolder surfaceHolder;
	boolean recording;
	// Released by the TimerTask below to signal that 1.3 s have elapsed.
	static Semaphore lock = new Semaphore(0);
	// NOTE(review): a TimerTask is single-use; re-clicking after a prior run
	// would rescheduling an already-used task — confirm intended lifecycle.
	TimerTask task;

	/** Called when the activity is first created. */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// Timer callback: releases the semaphore the click handler waits on.
		task = new TimerTask() {
			@Override
			public void run() {
				lock.release();
			}
		};

		recording = false;
		setContentView(R.layout.activity_main);

		// Get Camera for preview
		myCamera = getCameraInstance();
		if (myCamera == null) {
			Toast.makeText(AndroidVideoCapture.this, "Fail to get Camera",
					Toast.LENGTH_LONG).show();
		}

		// Attach the live camera preview surface to the frame layout.
		myCameraSurfaceView = new MyCameraSurfaceView(this, myCamera);
		MyFrameLayout myCameraPreview = (MyFrameLayout) findViewById(R.id.videoview);
		myCameraPreview.addView(myCameraSurfaceView);

		// Overlay the control layout (capture button) on top of the preview.
		LayoutInflater controlInflater = LayoutInflater.from(getBaseContext());
		View viewControl = controlInflater.inflate(R.layout.control, null);
		LayoutParams layoutParamsControl = new LayoutParams(
				LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
		this.addContentView(viewControl, layoutParamsControl);

		myButton = (ImageButton) findViewById(R.id.mybutton);
		myButton.setAlpha(170);
		myButton.setOnClickListener(new Button.OnClickListener() {

			@Override
			public void onClick(View v) {
				Timer t = new Timer();
				// Disable the button so the capture can't be re-triggered.
				v.setClickable(false);
				v.setEnabled(false);
				startRecording();
				// Release the semaphore 1.3 s from now.
				t.schedule(task, 1300);
				try {
					// NOTE(review): blocks the UI thread until the timer fires.
					lock.acquire();
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
				playVideo();
				stopRecording();
				finish();
			}
		});

	}

	// Launch the playback activity for the clip just written to `filename`.
	private void playVideo() {
		Intent i=new Intent(getBaseContext(),VideoPlayer.class);
		i.putExtra("filepath", filename);
		startActivityForResult(i, 0);
	}

	// Prepare the MediaRecorder and begin recording; shows a toast and
	// finishes the activity if preparation fails.
	private void startRecording() {
		// Release Camera before MediaRecorder start
		releaseCamera();

		if (!prepareMediaRecorder()) {
			Toast.makeText(AndroidVideoCapture.this,
					"Fail in prepareMediaRecorder()!\n - Ended -",
					Toast.LENGTH_LONG).show();
			finish();
		}
		mediaRecorder.start();
	}

	private void stopRecording() {
		// stop recording and release camera
		mediaRecorder.stop(); // stop the recording
		releaseMediaRecorder(); // release the MediaRecorder object

		// Exit after saved
	}

	// Open the default (back) camera; returns null when it is unavailable.
	private Camera getCameraInstance() {
		// TODO Auto-generated method stub
		Camera c = null;
		try {
			c = Camera.open(); // attempt to get a Camera instance
		} catch (Exception e) {
			// Camera is not available (in use or does not exist)
		}
		return c; // returns null if camera is unavailable
	}

	// Configure the MediaRecorder: 480p profile, 1 s max duration, 5 MB max
	// size, output under /sdcard/JustASecond/. Returns false on failure.
	private boolean prepareMediaRecorder() {
		myCamera = getCameraInstance();
		mediaRecorder = new MediaRecorder();

		myCamera.unlock();
		mediaRecorder.setCamera(myCamera);

		mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
		mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);

		mediaRecorder.setProfile(CamcorderProfile
				.get(CamcorderProfile.QUALITY_480P));
		// Ensure the output directory exists before recording into it.
		File f = new File("/sdcard/JustASecond");
		if (!f.exists()){
			f.mkdir();
		}
		filename = "/sdcard/JustASecond/Clip"+System.currentTimeMillis()+".mp4";
		mediaRecorder.setOutputFile(filename);
		mediaRecorder.setMaxDuration(1000); // Set max duration 1 sec.
		mediaRecorder.setMaxFileSize(5000000); // Set max file size 5M

		mediaRecorder.setPreviewDisplay(myCameraSurfaceView.getHolder()
				.getSurface());

		try {
			mediaRecorder.prepare();
		} catch (IllegalStateException e) {
			releaseMediaRecorder();
			return false;
		} catch (IOException e) {
			releaseMediaRecorder();
			return false;
		}
		return true;

	}

	@Override
	protected void onPause() {
		super.onPause();
		releaseMediaRecorder(); // if you are using MediaRecorder, release it
								// first
		releaseCamera(); // release the camera immediately on pause event
	}

	// Tear down the recorder and hand the camera back for later use.
	private void releaseMediaRecorder() {
		if (mediaRecorder != null) {
			mediaRecorder.reset(); // clear recorder configuration
			mediaRecorder.release(); // release the recorder object
			mediaRecorder = null;
			myCamera.lock(); // lock camera for later use
		}
	}

	private void releaseCamera() {
		if (myCamera != null) {
			myCamera.release(); // release the camera for other applications
			myCamera = null;
		}
	}

	// SurfaceView that shows the live camera preview and restarts the preview
	// whenever the underlying surface changes.
	public class MyCameraSurfaceView extends SurfaceView implements
			SurfaceHolder.Callback {

		private SurfaceHolder mHolder;
		private Camera mCamera;

		public MyCameraSurfaceView(Context context, Camera camera) {
			super(context);
			mCamera = camera;

			// Install a SurfaceHolder.Callback so we get notified when the
			// underlying surface is created and destroyed.
			mHolder = getHolder();
			mHolder.addCallback(this);
			// deprecated setting, but required on Android versions prior to 3.0
			mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
		}

		@Override
		public void surfaceChanged(SurfaceHolder holder, int format,
				int weight, int height) {
			// If your preview can change or rotate, take care of those events
			// here.
			// Make sure to stop the preview before resizing or reformatting it.

			if (mHolder.getSurface() == null) {
				// preview surface does not exist
				return;
			}

			// stop preview before making changes
			try {
				mCamera.stopPreview();
			} catch (Exception e) {
				// ignore: tried to stop a non-existent preview
			}

			// make any resize, rotate or reformatting changes here

			// start preview with new settings
			try {
				mCamera.setPreviewDisplay(mHolder);
				mCamera.startPreview();
			} catch (Exception e) {
			}
		}

		@Override
		public void surfaceCreated(SurfaceHolder holder) {
			// TODO Auto-generated method stub
			// The Surface has been created, now tell the camera where to draw
			// the preview.
			try {
				mCamera.setPreviewDisplay(holder);
				mCamera.startPreview();
			} catch (IOException e) {
			}
		}

		@Override
		public void surfaceDestroyed(SurfaceHolder holder) {
			// TODO Auto-generated method stub
		}
	}
}<file_sep>package com.phd3.onesecond;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.FrameLayout;
/**
 * FrameLayout that records the width and height it was asked to measure so
 * other code can read the container's size.
 */
public class MyFrameLayout extends FrameLayout {

	/** Height in pixels extracted from the last onMeasure() pass. */
	public int height;
	/** Width in pixels extracted from the last onMeasure() pass. */
	public int width;

	public MyFrameLayout(Context context, AttributeSet attrs, int defStyle) {
		super(context, attrs, defStyle);
	}

	public MyFrameLayout(Context context, AttributeSet attrs) {
		super(context, attrs);
	}

	public MyFrameLayout(Context context) {
		super(context);
	}

	@Override
	protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
		// Bug fix: the original overwrote `width` with
		// MeasureSpec.getSize(MeasureSpec.AT_MOST) and then
		// MeasureSpec.getSize(MeasureSpec.EXACTLY) — the size bits of a bare
		// mode constant, i.e. always 0 — and derived `height` from the
		// *width* spec. Extract each dimension from its own measure spec.
		width = MeasureSpec.getSize(widthMeasureSpec);
		height = MeasureSpec.getSize(heightMeasureSpec);
		super.onMeasure(widthMeasureSpec, heightMeasureSpec);
	}
}
<file_sep>package com.phd3.onesecond;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.Toast;
import android.support.v4.app.NavUtils;
public class MainActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
ImageButton myButton = (ImageButton) findViewById(R.id.mybutton);
myButton.setOnClickListener(myButtonOnClickListener);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_main, menu);
return true;
}
Button.OnClickListener myButtonOnClickListener = new Button.OnClickListener() {
@Override
public void onClick(View v) {
Intent myIntent = new Intent(v.getContext(), AndroidVideoCapture.class);
startActivityForResult(myIntent, 0);
}
};
} | 93dacb71508acb2374ca7dd97e46c10ab238c450 | [
"Java"
] | 3 | Java | benperez/PhotoHackDayProject | 89bebefa9beadb695e0a8d5cf947210005088665 | d3c3886d6f430b026ab8b46be24fbdd510d8af79 |
refs/heads/master | <file_sep>
const mongoose = require('mongoose')
const express = require('express')
const BlogModel = mongoose.model('Blog')
const shortId = require('shortid');
const generateResponse = require('../libs/responseLib')
const timeLib = require('../libs/timeLib')
const checkLib = require('../libs/checkLib')
const logger = require('../libs/loggerLib')
// Fetch every blog (all fields except __v and _id) and send the standard
// { error, message, status, data } API envelope.
let getAllBlogs = (req, res) => {
    BlogModel.find()
        .select('-__v -_id')
        .lean()
        .exec((err, result) => {
            if (err) {
                logger.captureerror(`Some Error Occured ${err}`, 'controller:getAllBlogs', 10)
                let apiResponse = generateResponse.generate(true, 'Some Error Occured !!', 500, err)
                res.send(apiResponse)
            } else if (checkLib.isEmpty(result)) {
                // Fix: the error flag was the misspelled identifier `ture`,
                // which threw a ReferenceError whenever no blogs existed.
                let apiResponse = generateResponse.generate(true, 'No Data Found!!', 404, err)
                res.send(apiResponse)
                logger.captureInfo('No Blog Found!!', 'controller:getAllBlogs', 10)
            } else {
                logger.captureInfo('Blog Found Successfully!!', 'controller:getAllBlogs', 10)
                let apiResponse = generateResponse.generate(false, 'Blogs Found Successfully !!', 200, result)
                res.send(apiResponse)
            }
        })
}
// Fetch a single blog by its :blogId URL parameter.
// Fix: removed a leftover `console.log(req.user)` debug statement.
let viewBlogById = (req, res) => {
    BlogModel.findOne({ 'blogId': req.params.blogId }, (err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:viewBlogById', 10)
            let apiResponse = generateResponse.generate(true, 'Some Error Occured', 500, err)
            res.send(apiResponse)
        } else if (checkLib.isEmpty(result)) {
            let apiResponse = generateResponse.generate(true, 'Blog Not Found', 404, err)
            res.send(apiResponse)
            logger.captureInfo('No Blog Found!!', 'controller:viewBlogById', 10)
        } else {
            logger.captureInfo('Blog Found Successfully!!', 'controller:viewBlogById', 10)
            let apiResponse = generateResponse.generate(false, 'Blog Found Sucessfully !!', 200, result)
            res.send(apiResponse)
        }
    })
}
// Fetch every blog written by the :author URL parameter.
let viewBlogByAuthor = (req, res) => {
    BlogModel.find({ 'author': req.params.author }, (err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured:${err}`, 'controller:viewBlogByAuthor', 10)
            let apiResponse = generateResponse.generate(true, 'Some Error Occured', 500, err)
            res.send(apiResponse)
        } else if (checkLib.isEmpty(result)) {
            // Fix: the error flags were inverted — the envelope reported
            // error:false for this 404 and error:true for the 200 below.
            let apiResponse = generateResponse.generate(true, 'No Blog Found !!', 404, err)
            res.send(apiResponse)
            logger.captureInfo('No Blog Found!!', 'controller:viewBlogByAuthor', 10)
        } else {
            logger.captureInfo('Blog Found Successfully!!', 'controller:viewBlogByAuthor', 10)
            let apiResponse = generateResponse.generate(false, 'Blog Found Successfully!!', 200, result)
            res.send(apiResponse)
        }
    })
}
// Look up every blog whose category matches the :category URL parameter and
// reply with the standard API envelope.
let viewBlogByCategory = (req, res) => {
    BlogModel.find({ 'category': req.params.category }, (err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:viewBlogByCategory', 10)
            res.send(generateResponse.generate(true, 'Some Error Occured', 500, err))
        } else if (checkLib.isEmpty(result)) {
            logger.captureInfo('No Blog Found!!', 'controller:viewBlogByCategory', 10)
            res.send(generateResponse.generate(true, 'No Blog Found', 404, err))
        } else {
            logger.captureInfo('Blog Found Successfully!!', 'controller:viewBlogByCategory', 10)
            res.send(generateResponse.generate(false, 'Blog Found Successfully!!', 200, result))
        }
    })
}
// Permanently remove the blog matching the :blogId URL parameter and report
// the outcome in the standard API envelope.
let deleteBlog = (req, res) => {
    BlogModel.remove({ 'blogId': req.params.blogId }, (err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:deleteBlog', 10)
            res.send(generateResponse.generate(true, 'Some Error Occured !!', 500, err))
        } else if (checkLib.isEmpty(result)) {
            logger.captureInfo('No Blog Found!!', 'controller:deleteBlog', 10)
            res.send(generateResponse.generate(true, 'No Blog Found', 404, err))
        } else {
            logger.captureInfo('Blog Deleted Successfully !!', 'controller:deleteBlog', 10)
            res.send(generateResponse.generate(false, 'Blog Deleted Successfully !!', 200, result))
        }
    })
}
// Apply the request body as a partial update to every blog matching the
// :blogId URL parameter (multi:true) and report the outcome.
let editBlog = (req, res) => {
    let options = req.body;
    BlogModel.update({ 'blogId': req.params.blogId }, options, { multi: true }).exec((err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:editBlog', 10)
            res.send(generateResponse.generate(true, 'Some error Occured!!', 500, err))
        } else if (checkLib.isEmpty(result)) {
            logger.captureInfo('No Blog Found!!', 'controller:editBlog', 10)
            res.send(generateResponse.generate(true, 'No Blog Found', 404, err))
        } else {
            logger.captureInfo('Blog edited Successfully !!', 'controller:editBlog', 10)
            res.send(generateResponse.generate(false, 'Blog edited Successfully !!', 200, result))
        }
    });
}
// Increment the view counter of the blog matching :blogId: load the
// document, bump the in-memory `views` field, then persist it with save().
// NOTE(review): this read-modify-write is racy under concurrent requests; a
// single findOneAndUpdate with $inc would be safer — confirm before changing.
let increaseBlogView = (req, res) => {
    BlogModel.findOne({ 'blogId': req.params.blogId }, (err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:increaseBlogView',10)
            let apiResponse = generateResponse.generate(true, 'Some error Occured !!', 500, err)
            res.send(apiResponse)
        } else if (checkLib.isEmpty(result)) {
            logger.captureInfo('No Blog Found!!','controller:editBlog',10)
            let apiResponse = generateResponse.generate(true, 'No Blog Found', 404, err)
            res.send(apiResponse)
        } else {
            // Bump the counter on the fetched document and write it back.
            result.views += 1;
            result.save((err, result) => {
                if (err) {
                    logger.captureerror('Some Error Occured', 'controller:increaseBlogView',5)
                    let apiResponse = generateResponse.generate(true, 'Some error Occured !!', 500, err)
                    res.send(apiResponse)
                }
                else {
                    logger.captureInfo('Blog updated successfully','controller:increaseBlogView',10)
                    let apiResponse = generateResponse.generate(false, 'Blog Views increased Successfully!!', 200, result)
                    res.send(apiResponse)
                }
            })
        }
    })
}
// Create and persist a new blog from the request body. Tags arrive as a
// comma-separated string and are stored as an array; the author is taken
// from req.body.fullName.
let createBlog = (req, res) => {
    // NOTE(review): this assigns the function object itself, not its result —
    // `created`/`lastModified` are almost certainly meant to be
    // timeLib.convertToLocalTime() (missing call parentheses). Confirm the
    // helper's signature before fixing.
    var today = timeLib.convertToLocalTime;
    const blogId = shortId.generate();
    let newBlog = new BlogModel({
        blogId: blogId,
        title: req.body.title,
        description: req.body.description,
        bodyHtml: req.body.blogBody,
        isPublished: true,
        category: req.body.category,
        author: req.body.fullName,
        created: today,
        lastModified: today
    })
    // Split "a,b,c" into ['a','b','c']; treat missing/empty tags as none.
    let tags = ((req.body.tags !== undefined && req.body.tags !== '' && req.body.tags !== null) ? req.body.tags.split(',') : []);
    newBlog.tags = tags;
    newBlog.save((err, result) => {
        if (err) {
            logger.captureerror(`Some Error Occured ${err}`, 'controller:createBlog',10)
            let apiResponse = generateResponse.generate(true, 'Some Error Occured', 500, err)
            res.send(apiResponse)
        } else {
            let apiResponse = generateResponse.generate(false, 'Blog Created Successfully!!', 200, result)
            res.send(apiResponse)
        }
    })
}
// Controller surface consumed by the route layer.
module.exports = {
    getAllBlogs: getAllBlogs,
    viewBlogByAuthor: viewBlogByAuthor,
    viewBlogByCategory: viewBlogByCategory,
    viewBlogById: viewBlogById,
    editBlog: editBlog,
    deleteBlog: deleteBlog,
    increaseBlogView: increaseBlogView,
    createBlog: createBlog
}<file_sep>
// Demo Express middleware: attaches a hard-coded user object to the request
// and then hands control to the next handler in the chain.
let exampleMiddleWare = (req, res, next) => {
    const demoUser = { 'firstName': "mike", 'lastName': "tyson" }
    req.user = demoUser
    next()
}
// Expose the middleware so the app can register it.
module.exports = {
    exampleMiddleWare: exampleMiddleWare
}<file_sep>
// Express global error-handling middleware (four-argument signature).
// Logs the error and replies with the standard 500 API envelope.
let errorHandler = (err, req, res, next) => {
    // Fix: generateResponse was never imported in this module, so every
    // invocation crashed with a ReferenceError before any response was sent.
    // TODO(review): confirm the relative path matches this module's location.
    const generateResponse = require('../libs/responseLib')
    console.log('Application Error handler occured')
    console.log(err)
    let apiResponse = generateResponse.generate(true, 'Some error occured at global level!', 500, err)
    res.send(apiResponse)
}
let notFoundHandler = (req, res, next) => {
console.log('Global Not Found handler called')
let apiResponse = generateResponse.generate(true, 'Route not found in the application', 404, err)
res.send(apiResponse)
}
module.exports = {
errorHandler: errorHandler,
notFoundHandler: notFoundHandler
}
<file_sep># REST-Apis-for-CRUD
A sample application providing REST APIs for CRUD operations on a blog.
APIs for
creating a blog,
deleting a blog,
editing a blog,
reading blogs (by author, by category, and by ID), and
updating a blog's view count
are designed using Node.js, MongoDB, and Mongoose.
<file_sep>const express = require('express')
const auth = require('../middlewares/auth')
const blogController = require('../controllers/blogController')
const appConfig = require('../config/appConfig')
let exampleMiddleWare = require('./../middlewares/example')
let setRouter = (app) => {
// app.get('/test/route/:param1/:param2', controller.testRoute);
// app.get('/test/query',controller.testQuery);
// app.post('/test/body',controller.testBody);
let baseUrl = appConfig.apiVersion + '/blogs';
app.get(baseUrl + '/all', auth.isAuthenticated ,blogController.getAllBlogs);
/**
* @api {get} /api/v1/blogs/view/byAuthor/:author Get Blog by Author
* @apiVersion 0.0.1
* @apiName Get all Blogs
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
*
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": " All Blogs Found Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/view/byCategory/:category',auth.isAuthenticated, blogController.viewBlogByCategory);
/**
* @api {get} /api/v1/blogs/view/byCategory/:category Get Blog by category
* @apiVersion 0.0.1
* @apiName Get Blog by category
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {String} category category of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog found Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/view/:blogId', auth.isAuthenticated, blogController.viewBlogById);
/**
* @api {get} /api/v1/blogs/view/:blogId Get Blog by Author
* @apiVersion 0.0.1
* @apiName Get Blog by blogId
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {blogId} blogId blogId of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Found Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/view/byCategory/:category', auth.isAuthenticated, blogController.viewBlogByCategory);
/**
* @api {get} /api/v1/blogs/view/byCategory/:category Get Blog by category
* @apiVersion 0.0.1
* @apiName Get Blog by category
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {String} category category of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Found Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/view/byAuthor/:author', auth.isAuthenticated, blogController.viewBlogByAuthor);
/**
* @api {get} /api/v1/blogs/view/byAuthor/:author Get Blog by Author
* @apiVersion 0.0.1
* @apiName Get Blog by Author
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {author} author Author of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Found Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/view/byCategory/:category', auth.isAuthenticated, blogController.viewBlogByCategory);
/**
* @api {get} /api/v1/blogs/view/byCategory/:category Get Blog by category
* @apiVersion 0.0.1
* @apiName Get Blog by category
* @apiGroup read
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {String} category category of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Deleted Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.put(baseUrl + '/edit/:blogId',auth.isAuthenticated, blogController.editBlog);
/**
* @api {put} /api/v1/blogs/edit/:blogId Edit Blog
* @apiVersion 0.0.1
* @apiName Edit Blog
* @apiGroup edit
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param,body param or header)
* @apiParam {String} blogId blogId of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Edited Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.post(baseUrl + '/delete/:blogId',auth.isAuthenticated, blogController.deleteBlog);
/**
* @api {post} /api/v1/blogs/delete/:blogId Delete Blog
* @apiVersion 0.0.1
* @apiName delete Blog
* @apiGroup Delete
*
* @apiParam {String} authToken The token for authentication.(Send authToken as a query Param)
* @apiParam {String} blogId blogId of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Deleted Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.post(baseUrl + '/create', auth.isAuthenticated, blogController.createBlog);
/**
* @api {post} /api/v1/blogs/create Create Blog
* @apiVersion 0.0.1
* @apiName Create Blog
* @apiGroup Create
*
* @apiParam {String} authToken The token for authentication.
* @apiParam {String} title Title of the blog passed as a body parameter.
* @apiParam {String} description Description of the blog passed as a body parameter.
* @apiParam {String} blogBody blogBody of the blog passed as a body parameter.
* @apiParam {String} category category of the blog passed as a body parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Created Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
app.get(baseUrl + '/:blogId/count/view',auth.isAuthenticated, blogController.increaseBlogView);
/**
* @api {get} /api/v1/blogs/:blogId/count/view Create Blog
* @apiVersion 0.0.1
* @apiName Increase view Count
* @apiGroup update
*
* @apiParam {String} authToken The token for authentication.
* @apiParam {String} blogId blogId of the blog passed as a URL parameter.
*
* @apiSuccessExample {json} Success-Response:
* {
* "error": false,
* "message": "Blog Updates Successfully",
* "status": 200,
* "data" : [
* {
* blogId : "string",
* title:"string",
* description:"string",
* bodyHtml : "string",
* views: number,
* isPublished : boolean,
* category:"string"
* author : "string",
* tags : object(type=array),
* created : "date",
* lastModified: "date"
*
* }
* ]
* }
*
*
*
* @apiErrorExample {json} Error-Response:
*
* {
* "error": "trur",
* "message": "Error Occured",
* "status":500,
* "data":null
* }
*/
}
module.exports = {
setRouter: setRouter
} | fca6e86658cd851fd2f170628d280fb5114b5cbd | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | kslsoumya/REST-Apis-for-CRUD | f784f35646249826ee7618cbd0a3f79c96d5d3a2 | 711a4cadc8c4795fa823775a0708521eb87bc16c |
refs/heads/master | <repo_name>BaronLip/guessing-cli-chicago-web-062419<file_sep>/guessing_cli.rb
require "pry"
# Print the farewell message shown when the player quits.
# NOTE: deliberately shadows Kernel#exit within this script's method lookup.
def exit
  puts("Goodbye!")
end
# Interactive guessing loop: each round prompts the player, rolls a fresh
# 1..6 value, and compares it with the player's input. Typing "exit" prints
# the farewell message and leaves the loop.
def run_guessing_game
  input = ""
  while input do
    puts "Guess a number between 1 and 6."
    secret = rand(1..6).to_s
    input = gets.chomp
    case input
    when "exit"
      exit
      break
    when secret
      puts "You guessed the correct number!"
    else
      puts "The computer guessed #{secret}."
    end
  end
end
| f3b523104577075b6e2b312a21642d7cf484def3 | [
"Ruby"
] | 1 | Ruby | BaronLip/guessing-cli-chicago-web-062419 | b61e18f94a1f216b4868594c1aea18422920418c | 40cdbb682af64597c9d5f9847dc149598556bef1 |
refs/heads/main | <file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace CircoApp3
{
public partial class FrmPrincipal : Form
{
public FrmPrincipal()
{
InitializeComponent();
}
private void btnSair_Click(object sender, EventArgs e)
{
this.Close();
}
private void pbxCEP_Click(object sender, EventArgs e)
{
FrmBuscaCEP busca = new FrmBuscaCEP();
busca.Show();
}
}
}
<file_sep># CircodeAppsTDS06
Conjunto de aplicativos
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Text.RegularExpressions;
using System.Net;
using System.IO;
namespace CircoApp3
{
    /// <summary>
    /// CEP (Brazilian postal code) lookup form: queries the public ViaCEP
    /// web service and fills the address labels from the JSON response.
    /// NOTE(review): the parsing below strips JSON punctuation and then reads
    /// fields by fixed line index — it assumes ViaCEP's exact response layout.
    /// The HttpWebResponse is also never disposed on the success path.
    /// </summary>
    public partial class FrmBuscaCEP : Form
    {
        public FrmBuscaCEP()
        {
            InitializeComponent();
        }

        /// <summary>Closes this form.</summary>
        private void btnSair_Click(object sender, EventArgs e)
        {
            this.Close();
        }

        /// <summary>
        /// Fetches https://viacep.com.br/ws/{cep}/json for the typed CEP and
        /// populates the address, complement, district, city and state labels.
        /// </summary>
        private void btnBuscar_Click(object sender, EventArgs e)
        {
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create("https://viacep.com.br/ws/"+txtCEP.Text+"/json");
            request.AllowAutoRedirect = false;
            HttpWebResponse ChecaServidor = (HttpWebResponse)request.GetResponse();
            if (ChecaServidor.StatusCode !=HttpStatusCode.OK)
            {
                MessageBox.Show("Servidor Indisponivel!");
                return;// exit the routine and stop processing
            }
            using(Stream webStrean = ChecaServidor.GetResponseStream())
            {
                if(webStrean !=null)
                {
                    using (StreamReader responseReader = new StreamReader(webStrean))
                    {
                        // Strip JSON braces/commas/quotes, then read the
                        // remaining "key: value" lines by position.
                        string response = responseReader.ReadToEnd();
                        response = Regex.Replace(response, "[{},]", string.Empty);
                        response = response.Replace("\"", "");
                        String[] substrings = response.Split('\n');
                        int cont = 0;
                        foreach (var substring in substrings)
                        {
                            // Line 1 carries "erro: true" when the CEP is unknown.
                            if (cont == 1)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                if (valor[0] == " erro")
                                {
                                    MessageBox.Show("CEP não encontrado!");
                                    txtCEP.Focus();
                                    return;
                                }
                            }
                            // street address
                            if (cont == 2)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                lblEndereco.Text = valor[1];
                            }
                            // complement (address line 2)
                            if (cont == 3)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                lblComplemento.Text = valor[1];
                            }
                            // neighborhood
                            if (cont == 4)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                lblBairro.Text = valor[1];
                            }
                            // city
                            if (cont == 5)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                lblCidade.Text = valor[1];
                            }
                            // state (UF)
                            if (cont == 6)
                            {
                                string[] valor = substring.Split(":".ToCharArray());
                                lblUF.Text = valor[1];
                            }
                            cont++;
                        }
                    }
                }
            }
        }
    }
}
| 23215697839662f083ffec795aeb43a7a1a94335 | [
"Markdown",
"C#"
] | 3 | C# | paulogomes1987/CircodeAppsTDS06 | 27a8784f0d792e558e8a4cd93009781119d6b25b | 9777f2ce5615bcde48ee5107fdd120d5ee068d9a |
refs/heads/master | <repo_name>AmbientXYZ/CarbonMod<file_sep>/CarbonMod.cs
using System.ComponentModel.Composition;
using Ambient;
using Ambient.Visuals;
using System.Windows.Forms;
using SharpDX;
using System;
namespace CarbonMod
{
[Export(typeof(CarbonPlugin))]
public class CarbonMod : CarbonPlugin
{
private TerrainGeneration terrainGenerator;
#region IPlugin Members
public void Initialize(World world)
{
terrainGenerator = new TerrainGeneration(world);
}
public string Author
{
get
{
return "Ambient Software";
}
}
public void GenerateChunk(
Int3 chunkOffset,
Voxel.Ownership owner,
out ushort[] blocks,
out ushort[] crust,
out byte[] precipitationData,
out bool containsVolcano,
out bool containsGeyser,
out bool containsPotentialSpring,
out World.LifeMode lifeMode)
{
terrainGenerator.GenerateChunk(chunkOffset, owner, out blocks, out crust, out precipitationData, out containsVolcano, out containsGeyser, out containsPotentialSpring, out lifeMode);
}
public Block GetSuitableLivingBlock(Random random,
int weightedPrecipitation,
float temperatureWinter,
float temperatureSummer,
float chaos,
Block.LivingGenerationModes vegetationType)
{
return terrainGenerator.GetSuitableLivingBlock(random, weightedPrecipitation, temperatureWinter, temperatureSummer, chaos, vegetationType);
}
#endregion
}
}
<file_sep>/LICENSE.md
### Copyright © 2017 Ambient Software
Ambient Software retains copyright over this source code.
This source is provided to assist in creating mods for Carbon. Any other use is prohibited. | d267469ac1383923b21f15aa73d4ac983d376f32 | [
"Markdown",
"C#"
] | 2 | C# | AmbientXYZ/CarbonMod | 7f151a64f81af8891c6293ead197937d59cce76f | e3522844c30e18a67a634cab6f718d649764f594 |
refs/heads/master | <file_sep># smart-farming
Proof of concept of an IoT-powered farm.

The setup.

A very simple dashboard to visualise the metrics (soil moisture level, temperature, light intensity).
<file_sep>/*
* Smart farming proof-of-concept using IoT
* Done by <NAME>, <NAME>, Fiona, Jack and Sam
* BPAS AY19/20 Term 1 - G9T1
*/
const int pinLight = A0; // Light sensor is at A0.
const int pinTemp = A1; // Define the pin to which the temperature sensor is connected.
const int pinLed = 7; // D7.
// Define the B-value of the thermistor.
// This value is a property of the thermistor used in the Grove - Temperature Sensor,
// and used to convert from the analog value it measures and a temperature value.
const int B = 3975;
// Defines the light-sensor threshold value below which the LED will turn on.
// Decrease this value to make the device more sensitive to ambient light, or vice-versa.
int lightTreshold = 400;
void setup() {
// Configure the serial communication line at 9600 baud (bits per second.)
Serial.begin(9600);
pinMode(pinLed, OUTPUT); // Sets this digital pin as output (LED)
}
void loop() {
// Temperature reading.
int temp = analogRead(pinTemp); // Get the (raw) value of the temperature sensor.
float resistance = (float)(1023-temp)*10000/temp; // Determine the current resistance of the thermistor based on the sensor value.
float temperature = 1/(log(resistance/10000)/B+1/298.15)-273.15; // Calculate the temperature based on the resistance value.
Serial.println("Temp: " + (String)temperature); // Print the temperature to the serial console.
delay(1000); // Wait one second between measurements.
int light = analogRead(pinLight); // Get raw value of temp sensor, it's an analog sensor!
Serial.println("Light intensity: " + (String)light);
if (light < lightTreshold) {
digitalWrite(pinLed, HIGH);
}
else {
digitalWrite(pinLed, LOW);
}
}
| 50d29e8742337d713ff32f7364c06307ba57e87f | [
"Markdown",
"C++"
] | 2 | Markdown | gjj/smart-farming | 4d2373468b888d1e16cbfa4b6a424bae6f20361e | c59e269beb757917acedac41ade9baf0c4d054db |
refs/heads/master | <repo_name>wendyLai/AuthenticatorAPP0629<file_sep>/app/src/main/java/com/admin/app0612/AddAccountActivity.java
package com.admin.app0612;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.admin.app0612.addaccount.ManuallyAddAccountActivity;
import com.google.zxing.client.android.CaptureActivity;
import java.util.List;
/**
 * Screen for registering a new OTP account: the user either scans a QR code
 * (via the bundled ZXing {@code CaptureActivity}) or enters the account
 * details manually.
 * Created by Administrator on 2016/6/12.
 */
public class AddAccountActivity extends MyBottomBtnsActivity implements View.OnClickListener {
    private Button mManuallyAddAccountBtn;
    private Button mScanBarcodetBtn;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Install this page's content and configure the shared chrome.
        mySetPageContentView(R.layout.add_account_page_content);
        mySetNextBtnDisabled(true);
        mySetActionBarTitle(R.string.action_bar_title_add_account);
        mScanBarcodetBtn = (Button) findViewById(R.id.btn_scan_barcode);
        mManuallyAddAccountBtn = (Button) findViewById(R.id.btn_manually_add_account);
        mScanBarcodetBtn.setOnClickListener(this);
        mManuallyAddAccountBtn.setOnClickListener(this);
    }
    // Dispatch the two buttons to their respective flows.
    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.btn_scan_barcode:
                scanBarcodeInside();
                break;
            case R.id.btn_manually_add_account:
                manuallyAddAccount();
                break;
        }
    }
    /**
     * Open the screen for typing in an account manually.
     */
    private void manuallyAddAccount() {
        //Log.i("info", "手动添加用户");
        Intent intent = new Intent(this, ManuallyAddAccountActivity.class);
        startActivity(intent);
    }
    /**
     * Launch the embedded ZXing CaptureActivity to scan a QR code; the result
     * comes back through onActivityResult with request code
     * Utilities.scanBarcodeResult.
     */
    private void scanBarcodeInside() {
        Intent intent = new Intent();
        intent.setClass(AddAccountActivity.this, CaptureActivity.class);
        startActivityForResult(intent, Utilities.scanBarcodeResult);
    }
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        switch (requestCode) {
            case Utilities.scanBarcodeResult:
                // Scanner can return no data (e.g. back-pressed); skip parsing.
                if (data==null){
                    break;
                }
                Log.i("info", "扫描结果获取:" + data.getStringExtra("result"));
                decodeScanBarcodeResult(data.getStringExtra("result"));
                break;
            default:
                break;
        }
        // This activity always finishes once the scanner returns, even when
        // the scan produced no usable result.
        finish();
    }
    /**
     * Parse the otpauth-style URI returned by the scanner, persist the new
     * account via MainActivity.saveSecret, and jump back to the main screen.
     *
     * Expected shape: otpauth://totp/<label>?secret=<secret>&issuer=<issuer>
     *
     * NOTE(review): the variable names look swapped — the otpauth label lands
     * in 'url' and the issuer in 'username'. Confirm against the parameter
     * order expected by MainActivity.saveSecret before changing anything.
     */
    private void decodeScanBarcodeResult(String result) {
        String username;
        String url;
        String secert;
        url = result.substring(result.indexOf("totp/") + 5, result.indexOf("?"));
        secert = result.substring(result.indexOf("?secret=") + 8, result.indexOf("&issuer="));
        username = result.substring(result.indexOf("&issuer=") + 8, result.length());
        Log.i("info", "username=" + username);
        Log.i("info", "secert=" + secert);
        Log.i("info", "url=" + url);
        MainActivity.saveSecret(this, username, secert, url, null);
        // Return to the main account list, clearing intermediate activities.
        Intent intent = new Intent(this, MainActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
    }
    /**
     * Helper for launching the external ZXing barcode-scanner app instead of
     * the bundled scanner. Currently unused by the click handlers above.
     */
    public class startScanBarcodeOutside {
        private Context context;
        public startScanBarcodeOutside(Context context) {
            this.context = context;
        }
        /**
         * Launch ZXing if it is installed; otherwise do nothing (the
         * browser-download fallback is commented out).
         */
        private void scanBarcode() {
            String zXingPackageName = getResources().getString(R.string.zxing_package_name);
            boolean hasZXing = isPackageInstalled(context, zXingPackageName);
            if (hasZXing) {
                doStartApplicationWithPackageName(context, zXingPackageName);
            } else {
                //openBrowerDownload();
            }
        }
        /**
         * Check whether the given package is installed on this device.
         */
        private boolean isPackageInstalled(Context mcontext, String packagename) {
            PackageInfo packageInfo = null;
            try {
                packageInfo = mcontext.getPackageManager().getPackageInfo(packagename, 0);
            } catch (PackageManager.NameNotFoundException e) {
                packageInfo = null;
                e.printStackTrace();
            }
            if (packageInfo == null) {
                return false;
            } else {
                return true;
            }
        }
        /**
         * Open the browser at the ZXing download page (URI taken from
         * resources). Currently unused.
         */
        private void openBrowerDownload() {
            Uri uri = Uri.parse(getResources().getString(R.string.zxing_download_uri));
            Intent intent = new Intent(Intent.ACTION_VIEW, uri);
            startActivity(intent);
        }
        /**
         * Resolve the LAUNCHER activity of the given package and start it.
         */
        private void doStartApplicationWithPackageName(Context mcontext, String packagename) {
            PackageInfo packageInfo = null;
            try {
                packageInfo = mcontext.getPackageManager().getPackageInfo(packagename, 0);
            } catch (PackageManager.NameNotFoundException e) {
                e.printStackTrace();
            }
            if (packageInfo == null) {
                return;
            }
            // Build a CATEGORY_LAUNCHER intent restricted to that package.
            Intent resolveIntent = new Intent(Intent.ACTION_MAIN, null);
            resolveIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            resolveIntent.addCategory(Intent.CATEGORY_LAUNCHER);
            resolveIntent.setPackage(packageInfo.packageName);
            // Enumerate the activities matching that launcher intent.
            List<ResolveInfo> resolveinfoList = mcontext.getPackageManager()
                    .queryIntentActivities(resolveIntent, 0);
            ResolveInfo resolveinfo = resolveinfoList.iterator().next();
            if (resolveinfo != null) {
                // Package that owns the resolved launcher activity.
                String packageName = resolveinfo.activityInfo.packageName;
                // Fully-qualified class name of that app's LAUNCHER activity.
                String className = resolveinfo.activityInfo.name;
                // Start the resolved activity explicitly by component.
                Intent intent = new Intent(Intent.ACTION_MAIN);
                intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                intent.addCategory(Intent.CATEGORY_LAUNCHER);
                ComponentName componentName = new ComponentName(packageName, className);
                intent.setComponent(componentName);
                mcontext.startActivity(intent);
            }
        }
    }
}
<file_sep>/app/src/main/java/com/admin/app0612/DependencyInjector.java
package com.admin.app0612;
import android.content.Context;
import org.apache.http.client.HttpClient;
import org.apache.http.conn.ClientConnectionManager;
/**
 * Process-wide service locator: lazily creates and hands out the singleton
 * objects the client uses (database, OTP source, clock, HTTP client), with
 * setters so tests can inject replacements.
 * Created by Administrator on 2016/6/30.
 */
public final class DependencyInjector {
    private static AccountDb sAccountDb;
    private static Context sContext;
    private static OptionalFeatures sOptionalFeatures;
    private static OtpSource sOtpProvider;
    private static Clock sClock;
    private static HttpClient sHttpClient;
    // Static-only holder: never instantiated.
    private DependencyInjector() {
    }
    /**
     * Install the application Context all lazily-created singletons are
     * built from.
     *
     * NOTE(review): unlike the getters below, this setter is not
     * synchronized — confirm it is only called during single-threaded
     * startup.
     */
    public static void setContext(Context context) {
        sContext = context;
    }
    public static synchronized Context getContext() {
        if (sContext == null) {
            throw new IllegalStateException("Context not set");
        }
        return sContext;
    }
    /*************************** Database accessors ***************************/
    /**
     * Install the account-list database, ensuring at most one instance is
     * in use at a time.
     */
    public static synchronized void setAccountDb(AccountDb accountDb) {
        if (sAccountDb != null) {
            //sAccountDb.close();
        }
        sAccountDb = accountDb;
    }
    /**
     * Get (lazily creating) the account-list database.
     */
    public static synchronized AccountDb getAccountDb() {
        if (sAccountDb == null) {
            sAccountDb = new AccountDb(getContext());
        }
        return sAccountDb;
    }
    /**
     * Get (lazily creating) the optional-features implementation.
     */
    public static synchronized OptionalFeatures getOptionalFeatures() {
        if (sOptionalFeatures == null) {
            sOptionalFeatures = new BuildOptionalFeatures();
        }
        return sOptionalFeatures;
    }
    /**
     * Install the OTP-generation source.
     */
    public static synchronized void setOtpProvider(OtpSource otpProvider) {
        sOtpProvider = otpProvider;
    }
    /**
     * Get (lazily creating) the OTP-generation source.
     */
    public static synchronized OtpSource getOtpProvider() {
        if (sOtpProvider == null) {
            sOtpProvider = getOptionalFeatures().createOtpSource(getAccountDb(), getClock());
        }
        return sOtpProvider;
    }
    /**
     * Install the clock instance.
     */
    public static synchronized void setClock(Clock clock) {
        sClock = clock;
    }
    public static synchronized Clock getClock() {
        if (sClock == null) {
            sClock = new Clock(getContext());
        }
        return sClock;
    }
    /**
     * Install the HTTP client instance.
     */
    public static synchronized void setHttpClient(HttpClient httpClient) {
        sHttpClient = httpClient;
    }
    public static synchronized HttpClient getHttpClient() {
        if (sHttpClient == null) {
            sHttpClient = HttpClientFactory.createHttpClient(getContext());
        }
        return sHttpClient;
    }
    /**
     * Release all held singletons: closes the database, shuts down the HTTP
     * connection manager, and nulls every reference so the next getter call
     * rebuilds from scratch.
     */
    public static synchronized void close() {
        if (sAccountDb != null) {
            sAccountDb.close();
        }
        if (sHttpClient != null) {
            ClientConnectionManager httpClientConnectionManager = sHttpClient.getConnectionManager();
            if (httpClientConnectionManager != null) {
                httpClientConnectionManager.shutdown();
            }
        }
        sContext = null;
        sAccountDb = null;
        sOtpProvider = null;
        sClock = null;
        sHttpClient = null;
        sOptionalFeatures = null;
    }
}
<file_sep>/app/src/main/java/com/admin/app0612/CountTask.java
package com.admin.app0612;
import android.os.Handler;
/**
 * Periodically notifies its listener of the time remaining until the counter
 * value changes, and of the change itself, by re-posting itself on a Handler.
 * Created by Administrator on 2016/7/1.
 */
public class CountTask implements Runnable {
    private final Counter mCounter;
    private final Clock mClock;
    private final long mRemainingTimeNotificationPeriod;
    private final Handler mHandler = new Handler();
    private Listener mListener;
    private boolean mShouldStop;
    // Counter value observed on the previous tick; MIN_VALUE guarantees the
    // first tick fires onCounterValueChanged.
    private long mLastSeenCounterValue = Long.MIN_VALUE;
    /**
     * @param remainingTimeNotificationPeriod interval, in milliseconds, at
     *        which this task notifies its listener of the remaining time
     *        until the counter changes its value.
     */
    public CountTask(Counter counter, Clock clock, long remainingTimeNotificationPeriod) {
        this.mCounter = counter;
        this.mClock = clock;
        this.mRemainingTimeNotificationPeriod = remainingTimeNotificationPeriod;
    }
    /**
     * Callbacks fired by this task on the Handler's thread.
     */
    interface Listener {
        /**
         * Invoked when the counter value differs from the previous tick,
         * i.e. the displayed OTP code should be recomputed.
         */
        void onCounterValueChanged();
        /**
         * Invoked on every tick with the milliseconds remaining until the
         * next counter value.
         */
        void onCount(long millisRemaining);
    }
    /**
     * Install the listener to be notified on each tick.
     */
    void setListener(Listener listener) {
        mListener = listener;
    }
    /**
     * Permanently stop the task; it cannot be restarted afterwards.
     */
    void stop() {
        mShouldStop = true;
    }
    /**
     * Start the task and immediately deliver the first notifications.
     */
    public void startAndNotifyListener() {
        if (mShouldStop) {
            throw new IllegalStateException("Task already stopped and cannot be restarted.");
        }
        run();
    }
    /**
     * One tick: detect counter-value changes, report remaining time, and
     * schedule the next tick.
     */
    @Override
    public void run() {
        if (mShouldStop) {
            return;
        }
        // Current (possibly correction-adjusted) system time.
        long now = mClock.currentTimeMillis();
        // Counter value at that time.
        long counterValue = getCounterValue(now);
        if (mLastSeenCounterValue != counterValue) {
            // Remember the new value before notifying.
            mLastSeenCounterValue = counterValue;
            // Tell the listener the value rolled over.
            fireCounterValueChanged();
        }
        // Report the time left until the next rollover.
        fireCount(getTimeTillNextCounterValue(now));
        // Re-post this Runnable for the next tick.
        scheduleNextInvocation();
    }
    /**
     * Deliver the remaining-time notification, if there is an active listener.
     */
    private void fireCount(long timeRemaining) {
        // Only notify while a listener is set and the task is still running.
        if ((mListener != null) && (!mShouldStop)) {
            mListener.onCount(timeRemaining);
        }
    }
    /**
     * Deliver the value-changed notification, if there is an active listener.
     */
    private void fireCounterValueChanged() {
        if ((mListener != null) && (!mShouldStop)) {
            mListener.onCounterValueChanged();
        }
    }
    /**
     * Counter value at the given wall-clock time (milliseconds).
     */
    private long getCounterValue(long time) {
        return mCounter.getValueAtTime(Utilities.millisToSeconds(time));
    }
    /**
     * Milliseconds from {@code time} until the counter next changes value.
     */
    private long getTimeTillNextCounterValue(long time) {
        long currentValue = getCounterValue(time);
        long nextValue = currentValue + 1;
        long nextValueStartTime = Utilities.secondsToMillis(mCounter.getValueStartTime(nextValue));
        return nextValueStartTime - time;
    }
    /**
     * Post this Runnable so the next tick lands on the next multiple of the
     * notification period within the current counter value's lifetime.
     */
    private void scheduleNextInvocation() {
        long now = mClock.currentTimeMillis();
        long counterValueAge = getCounterValueAge(now);
        long timeTillNextInvocation =
                mRemainingTimeNotificationPeriod - (counterValueAge % mRemainingTimeNotificationPeriod);
        mHandler.postDelayed(this, timeTillNextInvocation);
    }
    /**
     * Milliseconds elapsed since the current counter value became active.
     */
    private long getCounterValueAge(long time) {
        return time - Utilities.secondsToMillis(mCounter.getValueStartTime(getCounterValue(time)));
    }
}
<file_sep>/app/src/main/java/com/admin/app0612/HowItWork1Activity.java
package com.admin.app0612;
import android.os.Bundle;
/**
 * First page of the "how it works" walk-through; pressing the next button
 * advances to HowItWork2Activity.
 * Created by Administrator on 2016/6/12.
 */
public class HowItWork1Activity extends MyBottomBtnsActivity {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Show this page's content; the walk-through hides the action bar.
        mySetPageContentView(R.layout.how_it_work1_page_content);
        mySetActionBarDisabled(true);
    }
    @Override
    protected void myOnNextPageBtnPressed() {
        super.myOnNextPageBtnPressed();
        // Advance to page 2 of the walk-through.
        myStartPageActivity(HowItWork2Activity.class);
    }
}
<file_sep>/app/src/main/java/com/admin/app0612/OptionalFeatures.java
package com.admin.app0612;
/**
 * Hook interface for supplying optional/alternative implementations of app
 * features; resolved through DependencyInjector.getOptionalFeatures().
 * Created by Administrator on 2016/6/30.
 */
public interface OptionalFeatures {
    /**
     * Creates the {@link OtpSource} instance used for OTP generation by the app.
     */
    OtpSource createOtpSource(AccountDb accountDb, Clock clock);
}
<file_sep>/app/src/main/java/com/admin/app0612/Clock.java
package com.admin.app0612;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
/**
 * Wall-clock source based on the current system time plus a user-configurable
 * correction offset (in minutes) stored in shared preferences.
 * Created by Administrator on 2016/7/1.
 */
public class Clock implements SharedPreferences.OnSharedPreferenceChangeListener {
    // @VisibleForTesting
    static final String PREFERENCE_KEY_OFFSET_MINUTES = "timeCorrectionMinutes";
    private final SharedPreferences mPreferences;
    private final Object mLock = new Object();
    /**
     * Cached value of time correction (in minutes) or {@code null} if not cached. The value is cached
     * because it's read very frequently (once every 100ms) and is modified very infrequently.
     *
     * @GuardedBy {@link #mLock}
     */
    private Integer mCachedCorrectionMinutes;
    public Clock(Context context) {
        mPreferences = PreferenceManager.getDefaultSharedPreferences(context);
        // Listen for external edits so the cached offset can be invalidated.
        mPreferences.registerOnSharedPreferenceChangeListener(this);
    }
    /**
     * Gets the number of milliseconds since epoch.
     */
    public long currentTimeMillis() {
        return System.currentTimeMillis() + getTimeCorrectionMinutes() * Utilities.MINUTE_IN_MILLIS;
    }
    /**
     * Gets the currently used time correction value.
     *
     * @return number of minutes by which this device is behind the correct time.
     */
    public int getTimeCorrectionMinutes() {
        synchronized (mLock) {
            if (mCachedCorrectionMinutes == null) {
                try {
                    mCachedCorrectionMinutes = mPreferences.getInt(PREFERENCE_KEY_OFFSET_MINUTES, 0);
                } catch(ClassCastException e) {
                    // Fallback: the preference was stored as a String —
                    // presumably by an older version; parse it instead.
                    mCachedCorrectionMinutes = Integer.valueOf(mPreferences.getString(PREFERENCE_KEY_OFFSET_MINUTES, "0"));
                }
            }
            return mCachedCorrectionMinutes;
        }
    }
    /**
     * Sets the currently used time correction value.
     *
     * @param minutes number of minutes by which this device is behind the correct time.
     */
    public void setTimeCorrectionMinutes(int minutes) {
        synchronized (mLock) {
            mPreferences.edit().putInt(PREFERENCE_KEY_OFFSET_MINUTES, minutes).commit();
            // Invalidate the cache to force reading actual settings from time to time
            mCachedCorrectionMinutes = null;
        }
    }
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (key.equals(PREFERENCE_KEY_OFFSET_MINUTES)) {
            // Invalidate the cache
            mCachedCorrectionMinutes = null;
        }
    }
}
| a24113bd3c1b9d1e04856127a3290c44e8e2453e | [
"Java"
] | 6 | Java | wendyLai/AuthenticatorAPP0629 | 0af6de6f2c9e190831a7b9a390df00db7a6dc59c | c2ba7bbcb5ef2467c83b142ec9b85399df0b3ee2 |
refs/heads/master | <repo_name>chengyijun/demo40<file_sep>/Weibo/Runtime/Cache/Admin/de7c5a34a8390cb05702ff28e49ff5ba.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML>
<html>
<head>
<meta charset="UTF-8" />
<title>后台首页</title>
</head>
<body>
<form method="post" action="<?php echo U('Login/index');?>">
<p>账号:<input type="text" name="user"/></p>
<p><input type="submit" value="登录"/></p>
</form>
</body>
</html><file_sep>/Weibo/Home/Model/UserModel.class.php
<?php
// 本类由系统自动生成,仅供测试用途
namespace Home\Model;
use Think\Model;
class UserModel extends Model\RelationModel {
public $user;
public $pass;
protected $_link = array(
'Card'=>array(
'mapping_type'=>self::HAS_ONE,
'foreign_key'=>'uid',
'mapping_fields'=>'code',
'as_fields'=>'code',
//'condition'=>'id=1',
//'mapping_name'=>'abc',
//'class_name'=>'Card',
),
/* 'Content'=>array(
'mapping_type'=>self::HAS_MANY,
'foreign_key'=>'uid',
'mapping_fields'=>'contents',
'mapping_order'=>'id DESC',
'mapping_limit'=>'0,2',
'mapping_name'=>'content',
),
*/
/* 'Role'=>array(
'mapping_type'=>self::MANY_TO_MANY,
'relation_table'=>'think_group',
'foreign_key'=>'uid',
'relation_foreign_key'=>'gid',
),
*/
);
// protected $insertFields = 'user';
// protected $updateFields = 'user';
// protected $_map = array(
// 'yonghu'=>'user',
// 'youxiang'=>'email'
// );
//开启自动验证
protected $patchValidate = true;
/* protected $_validate = array(
// array('user','require','用户名不得为空!'),
// array('user','email','邮箱格式不正确!'),
// array('user','url','url格式不正确!'),
// array('user','currency','货币格式不正确!'),
// array('user','zip','邮政编码格式不正确!'),
// array('user','number','不是正整数!'),
// array('user','integer','不是整数!'),
// array('user','double','不是浮点数!'),
// array('user','english','不是纯英文!'),
// array('user','/^\d{3,6}$/','不是3-6位的纯数字!',0,'regex'),
// array('user','程义军','传递过来的值不相等!',0,'equal'),
// array('user','程义军','传递过来的值必须不相等!',0,'notequal'),
// array('user','name','值不相等!',0,'confirm'),
// array('user','3','不得小于3位',0,'length'),
// array('user','3,5','长度不得小于3位且不得大于5位!',0,'length'),
// array('user',checkLength,'用户名长度不得小于3位且不得大于5位!',0,'callback',3,array(3,5)),
// array('user',checkLength,'用户名长度不得小于3位且不得大于5位!',0,'function',3,array(3,5)),
// array('email','email','邮箱格式不正确!'),
);*/
/* protected $_auto = array(
// array('count',1),
// array('user','sha1',3,'function'),
// array('user','email',3,'field'),
// array('user','updateUser',3,'callback','_'),
// array('user','updateUser',3,'function','_'),
array('user','',2,'ignore'),
);*/
/* protected function updateUser($str,$prefix){
return $prefix.$str;
}*/
/* protected function checkLength($str,$min,$max){
preg_match_all('/./u',$str,$matches);
$len = count($matches[0]);
if($len < $min || $len > $max){
return false;
}else{
return true;
}
}*/
	/**
	 * Look up the row whose user AND pass columns both match the credentials
	 * currently held in $this->user / $this->pass.
	 *
	 * NOTE(review): the password is compared as-is in the query — confirm
	 * whether hashing is applied before it reaches this model.
	 *
	 * @return mixed the matching row, or an empty result when none matches
	 */
	public function checkLogin(){
		return $this->where(array('user'=>$this->user))->where(array('pass'=>$this->pass))->find();
	}
}<file_sep>/thinkphp.sql
/*
Navicat MySQL Data Transfer
Source Server : root
Source Server Version : 50540
Source Host : localhost:3306
Source Database : thinkphp
Target Server Type : MYSQL
Target Server Version : 50540
File Encoding : 65001
Date: 2016-02-16 22:45:55
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for `think_analysis`
-- ----------------------------
DROP TABLE IF EXISTS `think_analysis`;
CREATE TABLE `think_analysis` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`uid` mediumint(8) unsigned DEFAULT NULL,
`user` varchar(20) DEFAULT NULL,
`sum_offduty` float(10,1) DEFAULT NULL,
`sum_extra` float(10,1) DEFAULT NULL,
`sum_have` float(10,1) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_analysis
-- ----------------------------
INSERT INTO `think_analysis` VALUES ('2', '25', '程义军', '3.5', '30.2', '26.7');
INSERT INTO `think_analysis` VALUES ('3', '25', '程义军', '3.5', '30.2', '26.7');
-- ----------------------------
-- Table structure for `think_auth_group`
-- ----------------------------
DROP TABLE IF EXISTS `think_auth_group`;
CREATE TABLE `think_auth_group` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`title` char(100) NOT NULL DEFAULT '',
`status` tinyint(1) NOT NULL DEFAULT '1',
`rules` char(80) NOT NULL DEFAULT '',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_auth_group
-- ----------------------------
INSERT INTO `think_auth_group` VALUES ('1', '默认管理组', '1', '1,2,3,4,5');
-- ----------------------------
-- Table structure for `think_auth_group_access`
-- ----------------------------
DROP TABLE IF EXISTS `think_auth_group_access`;
CREATE TABLE `think_auth_group_access` (
`uid` mediumint(8) unsigned NOT NULL,
`group_id` mediumint(8) unsigned NOT NULL,
UNIQUE KEY `uid_group_id` (`uid`,`group_id`),
KEY `uid` (`uid`),
KEY `group_id` (`group_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_auth_group_access
-- ----------------------------
INSERT INTO `think_auth_group_access` VALUES ('3', '1');
-- ----------------------------
-- Table structure for `think_auth_rule`
-- ----------------------------
DROP TABLE IF EXISTS `think_auth_rule`;
CREATE TABLE `think_auth_rule` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`name` char(80) NOT NULL DEFAULT '',
`title` char(20) NOT NULL DEFAULT '',
`type` tinyint(1) NOT NULL DEFAULT '1',
`status` tinyint(1) NOT NULL DEFAULT '1',
`condition` char(100) NOT NULL DEFAULT '',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_auth_rule
-- ----------------------------
INSERT INTO `think_auth_rule` VALUES ('1', 'Admin/Index/index', '后台首页', '1', '1', '');
-- ----------------------------
-- Table structure for `think_card`
-- ----------------------------
DROP TABLE IF EXISTS `think_card`;
CREATE TABLE `think_card` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`code` char(6) DEFAULT NULL,
`uid` mediumint(8) unsigned DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=7 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_card
-- ----------------------------
INSERT INTO `think_card` VALUES ('1', '321551', '1');
INSERT INTO `think_card` VALUES ('2', '321552', '2');
INSERT INTO `think_card` VALUES ('3', '321559', '4');
INSERT INTO `think_card` VALUES ('6', '2346BB', '24');
-- ----------------------------
-- Table structure for `think_content`
-- ----------------------------
DROP TABLE IF EXISTS `think_content`;
CREATE TABLE `think_content` (
`id` smallint(6) unsigned NOT NULL AUTO_INCREMENT,
`contents` text,
`uid` mediumint(8) unsigned DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_content
-- ----------------------------
INSERT INTO `think_content` VALUES ('1', '我的第一条留言', '1');
INSERT INTO `think_content` VALUES ('2', '他的第一条留言', '2');
INSERT INTO `think_content` VALUES ('3', '我的第二条留言', '1');
INSERT INTO `think_content` VALUES ('4', '他的第二条留言', '2');
INSERT INTO `think_content` VALUES ('5', '我的第三条留言', '1');
-- ----------------------------
-- Table structure for `think_group`
-- ----------------------------
DROP TABLE IF EXISTS `think_group`;
CREATE TABLE `think_group` (
`uid` smallint(6) unsigned NOT NULL,
`gid` smallint(6) unsigned NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_group
-- ----------------------------
INSERT INTO `think_group` VALUES ('1', '1');
INSERT INTO `think_group` VALUES ('1', '2');
INSERT INTO `think_group` VALUES ('1', '3');
INSERT INTO `think_group` VALUES ('2', '2');
INSERT INTO `think_group` VALUES ('2', '3');
-- ----------------------------
-- Table structure for `think_role`
-- ----------------------------
DROP TABLE IF EXISTS `think_role`;
CREATE TABLE `think_role` (
`id` smallint(6) unsigned NOT NULL AUTO_INCREMENT,
`title` varchar(20) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_role
-- ----------------------------
INSERT INTO `think_role` VALUES ('1', '管理员');
INSERT INTO `think_role` VALUES ('2', '认证专员');
INSERT INTO `think_role` VALUES ('3', '审核专员');
-- ----------------------------
-- Table structure for `think_user`
-- ----------------------------
DROP TABLE IF EXISTS `think_user`;
CREATE TABLE `think_user` (
`id` smallint(6) unsigned NOT NULL AUTO_INCREMENT,
`user` varchar(20) DEFAULT NULL,
`pass` varchar(40) DEFAULT NULL,
`email` varchar(20) DEFAULT NULL,
`count` int(11) DEFAULT '0',
`date` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=38 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_user
-- ----------------------------
INSERT INTO `think_user` VALUES ('30', '桂绍彬', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('25', '程义军', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('26', '王娜娜', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('27', '王秀丽', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('28', '魏倩倩', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('29', '陆家林', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('31', '吕彬', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('32', '陈超', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('33', '董小芳', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('34', '年美玲', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('35', '韦春燕', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('36', '李路生', '123456', null, '0', null);
INSERT INTO `think_user` VALUES ('37', '杨吉凌', '123456', null, '0', null);
-- ----------------------------
-- Table structure for `think_work`
-- ----------------------------
DROP TABLE IF EXISTS `think_work`;
CREATE TABLE `think_work` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`uid` mediumint(8) unsigned DEFAULT NULL,
`type` char(1) DEFAULT NULL,
`time` float(10,1) DEFAULT NULL,
`start_time` datetime DEFAULT NULL,
`end_time` datetime DEFAULT NULL,
`reason` text,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=60 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of think_work
-- ----------------------------
INSERT INTO `think_work` VALUES ('15', '28', '1', '2.0', '2016-02-16 00:00:00', '2016-02-16 08:00:00', '周末加班的啊,测试众筹项目');
INSERT INTO `think_work` VALUES ('17', '28', '1', '3.0', '2016-02-01 00:00:00', '2016-02-02 00:00:00', '加班嗑瓜子');
INSERT INTO `think_work` VALUES ('53', '25', '0', '3.5', '2016-02-01 00:00:00', '2016-02-01 03:30:00', '我调休了3.5个小时');
INSERT INTO `think_work` VALUES ('54', '25', '1', '1.0', '2016-02-01 06:00:00', '2016-02-01 07:00:00', '张起灵');
INSERT INTO `think_work` VALUES ('21', '25', '1', '1.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '舒服舒服');
INSERT INTO `think_work` VALUES ('23', '25', '1', '1.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '我又加班了一个小时');
INSERT INTO `think_work` VALUES ('55', '25', '0', '2.0', '2016-02-02 13:00:00', '2016-02-02 15:00:00', '查文斌');
INSERT INTO `think_work` VALUES ('48', '25', '1', '3.0', '2016-02-01 00:00:00', '2016-02-01 03:40:00', '是否自动计算时间');
INSERT INTO `think_work` VALUES ('26', '25', '1', '3.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '我加班了3个小时了');
INSERT INTO `think_work` VALUES ('27', '25', '1', '1.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '是否');
INSERT INTO `think_work` VALUES ('28', '25', '1', '2.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', 'fff');
INSERT INTO `think_work` VALUES ('29', '25', '1', '3.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '加班及调休工时记录器,欢迎bug反馈 Email:<EMAIL> qq: 578575608 加班及调休工时记录器,欢迎bug反馈 Email:<EMAIL> qq: 578575608 加班及调休工时记录器,欢迎bug反馈 Email:<EMAIL> qq: 578575608 ');
INSERT INTO `think_work` VALUES ('30', '25', '1', '1.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '<b>我加粗了没</b>');
INSERT INTO `think_work` VALUES ('31', '25', '1', '2.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '<span style="red;">我红了没</span>');
INSERT INTO `think_work` VALUES ('56', '25', '1', '2.9', '2016-02-01 18:08:00', '2016-02-01 21:00:00', '我再加班一次啊');
INSERT INTO `think_work` VALUES ('33', '25', '1', '2.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '方法三个傻瓜傻瓜');
INSERT INTO `think_work` VALUES ('35', '25', '1', '2.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '三国杀');
INSERT INTO `think_work` VALUES ('37', '25', '1', '2.0', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '三国杀');
INSERT INTO `think_work` VALUES ('57', '25', '0', '3.5', '2016-02-01 08:00:00', '2016-02-01 11:30:00', '我调休一次');
INSERT INTO `think_work` VALUES ('58', '28', '1', '1.0', '2016-02-02 18:00:00', '2016-02-02 19:00:00', '加班#3');
INSERT INTO `think_work` VALUES ('59', '28', '0', '2.5', '2016-02-03 09:00:00', '2016-02-03 11:30:00', '调休#1');
INSERT INTO `think_work` VALUES ('51', '25', '1', '5.7', '2016-02-01 00:00:00', '2016-02-01 05:40:00', '调休啊');
INSERT INTO `think_work` VALUES ('52', '25', '1', '1.5', '2016-02-01 00:00:00', '2016-02-01 01:30:00', '今天加班了1.5个小时');
<file_sep>/Weibo/Home/Controller/UserController.class.php
<?php
// 本类由系统自动生成,仅供测试用途
namespace Home\Controller;
use Think\Controller;
header("Content-type: text/html; charset=utf-8");
class UserController extends Controller {
public function login(){
$user = D('User');
$user->user = $_POST['user'];
$user->pass = $_POST['pass'];
//print_r($user->select());
//print_r($_POST);
//exit();
$list = $user->checkLogin();
//echo $list;
if($list){
session('sess_user',$list['user']);
session('sess_uid',$list['id']);
$this->success('登陆成功!',U('Index/index'));
}else{
$this->error('账户或密码错误!');
}
}
}<file_sep>/Weibo/Runtime/Cache/Home/6379b6bd273e272a694ef6d9982c6072.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title>工时登记</title>
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/header.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/base.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/footer.css" />
<script type="text/javascript" src="/demo39/Public/My97DatePicker/WdatePicker.js"></script>
</head>
<body>
<div id="header">
欢迎您, <strong><?php echo ($sess_user); ?></strong> !
<?php if($sess_user != '游客'): ?><a href="<?php echo U('Login/logout');?>">退出</a><br/>
<a href="<?php echo U('Index/index');?>">首页</a><?php endif; ?>
</div>
<div id="main">
<form method="post" action="<?php echo U('Work/note');?>">
<p><input type="hidden" name="uid" value="<?php echo ($sess_uid); ?>" /></p>
<p>类型:<input type="radio" name="type" checked="checked" value="1"/>加班
<input type="radio" name="type" value="0" />调休
</p>
<p>原因:<textarea name="reason"></textarea> (* 必填 加班原因及工作内容等)</p>
<p>起始时间:<input type="text" name="start_time" id="d233" onFocus="WdatePicker({startDate:'%y-%M-01 00:00:00',
dateFmt:'yyyy-MM-dd HH:mm',alwaysUseStartDate:true})"/> (* 必填)</p>
<p>结束时间:<input type="text" name="end_time" id="d233" onFocus="WdatePicker({startDate:'%y-%M-01 00:00:00',
dateFmt:'yyyy-MM-dd HH:mm',alwaysUseStartDate:true})"/> (* 必填)</p>
<p><input type="submit" value="提交"/></p>
</form>
</div>
<div id="footer">
<span style="margin:0 0 0 120px;">加班及调休工时记录器,欢迎bug反馈
Email:<EMAIL>
qq: 578575608</span>
</div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/518fe845aea48a05bcdd39778a5a1036.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8"/>
<title>前台登陆</title>
</head>
<body>
<form method="post" action="">
<p>账户:<input type="text" name="user" /></p>
<p>密码:<input type="password" name="pass" /></p>
<input type="submit" value="登陆" />
</form>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/c0e26a67c385745be74dba36332ce051.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>我是index标题</title>
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/base.css" />
</head>
<body>
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>我是index标题</title>
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/base.css" />
</head>
<body>
我是头文件main
</body>
</html>
<!--
<div name="header">
我是头文件
</div>
-->
我是index main
<div name="footer">
我是脚文件
</div>
</body>
</html><file_sep>/Weibo/Home/Controller/WorkController.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/15 0015
* Time: 17:02
*/
namespace Home\Controller;
use Think\Controller;
use Think\Page;
header("Content-type:text/html;charset=UTF-8");
class WorkController extends Controller{
public function note(){
if(IS_POST){
//print_r(I('post.'));
//exit;
$work = D('Work');
if(!$work->create()){
$this->error('数据验证失败!请重新填写'.print_r($work->getError()),U('Work/note'));
}else{
print_r($work->create());
$work->add();
$this->success('信息登记成功!',U('Work/work_list?p=1'));
}
}else{
$sess_uid = session('sess_uid');
if(!is_login($sess_uid)){
$this->error('非法操作!',U('Login/login'));
}
$sess_uid = session('sess_uid');
$sess_user = session('sess_user');
$this->assign('sess_uid',$sess_uid);
$this->assign('sess_user',$sess_user);
$this->display();
}
}
public function work_list(){
$sess_uid = session('sess_uid');
$sess_user = session('sess_user');
if(!is_login($sess_uid)){
$this->error('非法操作!',U('Login/login'));
}
$work = D('Work');
$list = $work->where(array('uid'=>$sess_uid))->order(array('id'=>'DESC'))->page($_GET['p'].',10')->select();
$count = $work->where(array('uid'=>$sess_uid))->count();
$page = new Page($count,'10');
$page->setConfig('first','首页');
$page->setConfig('prev','上一页');
$page->setConfig('next','下一页');
$page->setConfig('last','尾页');
$show = $page->show();
$sum_offduty = $work->where(array('uid'=>$sess_uid))->where(array('type'=>'0'))->sum('time');
$sum_extra = $work->where(array('uid'=>$sess_uid))->where(array('type'=>'1'))->sum('time');
$sum_have = $sum_extra- $sum_offduty;
if($sum_have>0){
$html_sum_have = '<strong style="font-size:20px;color:green;">'.$sum_have.'</strong>';
}else{
$html_sum_have = '<strong style="font-size:20px;color:red;">'.$sum_have.'</strong>';
}
$this->assign('sess_uid',$sess_uid);
$this->assign('sess_user',$sess_user);
$this->assign('list',$list);
$this->assign('page',$show);
$this->assign('sum_offduty',$sum_offduty);
$this->assign('sum_extra',$sum_extra);
$this->assign('sum_have',$html_sum_have);
$this->display();
}
}<file_sep>/Weibo/Home/Model/WorkModel.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/15 0015
* Time: 17:57
*/
namespace Home\Model;
use Think\Model;
class WorkModel extends Model {
    // Model for overtime / time-off work records (think_work table).
    // Collect every validation error instead of stopping at the first one.
    protected $patchValidate = true;
    // Automatic validation rules (field, rule, message, ...): the two
    // 'function' rules call the checkStartTime()/checkEndTime() helpers
    // defined elsewhere in the project -- see ThinkPHP validation docs for
    // the condition/type/when trailing parameters.
    protected $_validate = array(
        array('reason','require','加班和调休原因必填!'),
        array('start_time','require','开始时间必填!'),
        array('end_time','require','结束时间必填必填!'),  // NOTE(review): message repeats "必填" -- likely a typo in the user-facing string
        array('start_time','checkStartTime','开始时间必须小于结束时间',1,'function',3),
        array('end_time','checkEndTime','结束时间必须小于当前时间',1,'function',3),
    );
    // Auto-fill: derive `time` (the duration column) via the have_time()
    // helper -- presumably hours computed from start/end; confirm.
    protected $_auto = array (
        array('time','have_time',1,'function'),
    );
}<file_sep>/Weibo/Admin/Model/AnalysisModel.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/16 0016
* Time: 17:13
*/
// Fix: the file lives under Admin/Model but declared `namespace
// Admin\Controller`, so the framework's class loader could never resolve
// this model (e.g. via D('Analysis')) from the Admin module.
namespace Admin\Model;
use Think\Model;
// Empty model: relies entirely on Think\Model naming conventions to map
// to its table; no custom validation or auto-fill rules.
class AnalysisModel extends Model {
}<file_sep>/Weibo/Runtime/Cache/Home/4010d8f76d2c114d86d57f0ebbe5e94b.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title></title>
<!--
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="./Common/js/index.js"></script>
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
-->
</head>
<body>
<div id="header">这里是头文件</div>
<table border="1" cellspacing="0" style="table-layout:fixed;width:500px;text-align:center">
<tr><th>id</th><th>user</th><th>email</th></tr>
<?php if(is_array($list)): foreach($list as $key=>$obj): ?><tr>
<td><?php echo ($obj["id"]); ?></td>
<td><?php echo ($obj["user"]); ?></td>
<td><?php echo ($obj["email"]); ?></td>
</tr><?php endforeach; endif; ?>
</table>
<?php echo ($page); ?>
<div id="footer">这里是脚文件</div>
</body>
</html><file_sep>/Weibo/Home/Model/UserViewModel.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/14 0014
* Time: 16:14
*/
namespace Home\Model;
use Think\Model\ViewModel;
class UserViewModel extends ViewModel {
    // View model joining User LEFT JOIN Content on User.id = Content.uid.
    // Exposes id / user / email plus count(Content.id) aliased as `abc`
    // (presumably callers group by user to count posts -- confirm).
    protected $viewFields = array(
        'User'=>array('id','user','email','_type'=>'LEFT','count(Content.id)'=>'abc'),
        //'Card'=>array('code','_on'=>'User.id=Card.uid'),
        'Content'=>array('contents','_on'=>'User.id=Content.uid'),
    );
}<file_sep>/Weibo/Home/Controller/TestController.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/18 0018
* Time: 21:06
*/
namespace Home\Controller;
use Think\Controller;
/**
 * Demo controller for the ThinkAjax examples: renders the login form and
 * answers the two AJAX endpoints the template posts to.
 *
 * Changes: input is read consistently through I('post.username') instead of
 * mixing raw $_POST with I() (checkLogin already used I() for the payload),
 * and the large block of dead commented-out code was removed.
 */
class TestController extends Controller{
    // Render the AJAX demo login form.
    public function login(){
        $this->display();
    }
    /**
     * AJAX endpoint: validate the posted user name.
     * Replies with the ThinkAjax JSON envelope: data / info / status (1 = ok).
     */
    public function checkName(){
        if(I('post.username') == 'admin'){
            $this->ajaxReturn(array('data'=>I('post.username'),'info'=>'用户名正确了~','status'=>1));
        }else{
            $this->ajaxReturn(array('data'=>I('post.username'),'info'=>'用户名错误了~','status'=>0));
        }
    }
    /**
     * AJAX endpoint: validate the full form submission.
     * Same envelope as checkName(); only the user name is actually checked.
     */
    public function checkLogin(){
        if(I('post.username') == 'admin'){
            $this->ajaxReturn(array('data'=>I('post.username'),'info'=>'用户名正确了~','status'=>1));
        }else{
            $this->ajaxReturn(array('data'=>I('post.username'),'info'=>'用户名错误了~','status'=>0));
        }
    }
}
?><file_sep>/ThinkPHP/Library/Think/Template/TagLib/Test.class.php
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK IT ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2013 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: liu21st <<EMAIL>>
// +----------------------------------------------------------------------
namespace Think\Template\TagLib;
use Think\Template\TagLib;
defined('THINK_PATH') or exit();
/**
* 自定义扩展标签(Test)库驱动
*/
class Test extends TagLib
{
    // Tag table: attr = permitted attributes, close = paired tag (1) vs
    // self-closing (0).
    protected $tags = array(
        'mytest' => array('attr' => 'color,border', 'close' => 1),
    );

    /**
     * Render the <test:mytest> tag: wraps the tag body in a <div> whose
     * inline style is assembled from the `color` and `border` attributes
     * (border is a pixel width, e.g. border="1" -> "1px solid #ccc").
     */
    public function _mytest($tags,$content)
    {
        $style = sprintf('color:%s;border:%spx solid #ccc', $tags['color'], $tags['border']);
        return sprintf('<div style="%s">%s</div>', $style, $content);
    }
}
<file_sep>/Weibo/Runtime/Cache/Home/acf720d30e87498515577d0f0d0e4b6a.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
前台登录
</head>
<body>
<div name="header">
我是头文件
</div>
<div id="login">
<form method="post" action="">
<p>账 户:<input type="text" name="user" /></p>
<p>密 码:<input type="password" name="pass" /></p>
<p>验证码:<input type="text" name="code" />
<img style="height:30px;" src="<?php echo U('Login/verify_code');?>" alt="图片验证码1"
onClick="this.src=this.src+'?'+Math.random()"/>
</p>
<input type="submit" value="登陆" />
</form>
</div>
<div name="footer">
我是脚文件
</div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/d019960442f8a6024e73de593f5e0648.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title>工时列表</title>
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/header.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/base.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/footer.css" />
<script type="text/javascript" src="/demo39/Public/My97DatePicker/WdatePicker.js"></script>
</head>
<body>
<div id="header">
欢迎您, <strong><?php echo ($sess_user); ?></strong> !
<?php if($sess_user != '游客'): ?><a href="<?php echo U('Login/logout');?>">退出</a><br/>
<a href="<?php echo U('Index/index');?>">首页</a><?php endif; ?>
</div>
<div id="main">
<table border="1" cellspacing="0" cellpadding="2" style="text-align:center;" >
<tr><th>编号</th><th>类型</th><th>时长</th><th>原因</th></tr>
<?php if(is_array($list)): foreach($list as $key=>$obj): ?><tr>
<td style="width:200px;"><?php echo ($key+1); ?></td>
<td style="width:200px;">
<?php if($obj["type"] == 1): ?>加班
<?php else: ?>调休<?php endif; ?>
</td>
<td style="width:200px;"><?php echo ($obj["time"]); ?></td>
<td style="width:600px;text-align: left"><a style="text-decoration: none;color:#666" href="" title="<?php echo ($obj["reason"]); ?>"><?php echo (mb_substr($obj["reason"],0,40,'utf-8')); ?></a></td>
</tr><?php endforeach; endif; ?>
</table>
<?php echo ($page); ?>
<p><div>累计调休时间:<?php echo ($sum_offduty); ?></div></p>
<p><div>累计加班时间:<?php echo ($sum_extra); ?></div></p>
<p><div>剩余可用调休时间:<?php echo ($sum_have); ?></div></p>
</div>
<div id="footer">
<span style="margin:0 0 0 120px;">加班及调休工时记录器,欢迎bug反馈
Email:<EMAIL>
qq: 578575608</span>
</div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/9c26fa297d447d8f5c03eeac0d27be95.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title>首页</title>
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/header.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/base.css" />
<link rel="stylesheet" type="text/css" href="/demo39/Public/Mycss/footer.css" />
<script type="text/javascript" src="/demo39/Public/My97DatePicker/WdatePicker.js"></script>
</head>
<body>
<div id="header">
欢迎您, <strong><?php echo ($sess_user); ?></strong> !
<?php if($sess_user != '游客'): ?><a href="<?php echo U('Login/logout');?>">退出</a><br/>
<a href="<?php echo U('Index/index');?>">首页</a><?php endif; ?>
</div>
<div id="main">
<a href="<?php echo U('Work/note');?>">登记加班调休信息</a><br/>
<a href="<?php echo U('Work/work_list?p=1');?>">我的加班调休列表</a>
</div>
<div id="footer">
<span style="margin:0 0 0 120px;">加班及调休工时记录器,欢迎bug反馈
Email:<EMAIL>
qq: 578575608</span>
</div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/7ee76877b7e4dc2d7b34d810d25e1feb.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>ThinkPHP Ajax 实现示例</title>
<script type="text/javascript" src="/demo39/Public/Js/Base.js"></script>
<script type="text/javascript" src="/demo39/Public/Js/prototype.js"></script>
<script type="text/javascript" src="/demo39/Public/Js/mootools.js"></script>
<script type="text/javascript" src="/demo39/Public/Js/Ajax/ThinkAjax.js"></script>
<script language="JavaScript">
<!--
function checkName(){
ThinkAjax.send('/demo39/Home/Test/checkName','ajax=1&username='+$('username').value,'','result');
}
function loginCheck(){
ThinkAjax.sendForm('form1','/demo39/Home/Test/checkLogin',complete,'result');
}
function complete(response,status){
if (status==1)
{
// 提示信息
$('list').innerHTML = '<span style="color:blue">'+response+'你好!</span>';
}
}
//-->
</script>
</head>
<body>
<div>
<div id="result"></div>
<div id="list"></div>
<form name="login" id="form1" method="post">
<input type="hidden" name="ajax" value="1">
用户名: <input type="text" name="username" id="username" />
<input type="button" value="检查用户名" onClick="checkName()"><br />
密 码: <input type="password" name="pass" /><br />
<input type="button" onClick="loginCheck()" value="提 交" />
</form>
</div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/aa90e591738b64b82a2ca7b8a3e3bdfe.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE html>
<html>
<head lang="zh-cn">
<meta charset="UTF-8">
<title>ThinkAjax DEMO</title>
<script type="text/javascript" src="/demo40/Public/Js/Base.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/prototype.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/mootools.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/Ajax/ThinkAjax.js"></script>
</head>
<body>
<form>
<p>
用户:<input type="text" name="account" />
<input type="button" value="检查" onclick="ajaxAccount()" />
</p>
<p>密码:<input type="<PASSWORD>" name="password" /></p>
<p><input type="button" value="提交" onclick="ajaxForm()" /></p>
</form>
</body>
</html><file_sep>/Weibo/Home/Controller/MailController.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/3/15 0015
* Time: 16:12
*/
namespace Home\Controller;
use Think\Controller;
class MailController extends Controller {
    // Demo action: loads the SMTP/PHPMailer vendor classes and sends a test
    // mail through the project-level think_send_mail() helper, echoing its
    // return value.
    public function index(){
        vendor('SMTP');
        vendor('PHPMailer');
        $mail = new \PHPMailer();
        $mail->SMTPSecure = 'ssl'; // use an SSL-secured SMTP connection
        // NOTE(review): $mail is configured but never passed anywhere --
        // think_send_mail() presumably builds its own mailer instance;
        // confirm whether this local object is dead code.
        echo think_send_mail('<EMAIL>','Tank','test主题','内容乱码了吗','');
    }
}<file_sep>/Public/PublicAction.class.php
<?php
// Legacy (ThinkPHP 2.x style) AJAX demo controller: serves the login form
// and validates the posted user name against the fixed value 'admin'.
class PublicAction extends Action{
    // Show the login template.
    public function login(){
        $this->display();
    }
    // Plain (non-AJAX) check: success()/error() render full result pages.
    public function checkName(){
        if($_POST['username'] != 'admin'){
            $this->error('用户名错误!');
        }else{
            $this->success('用户名正确~');
        }
    }
    // AJAX check using the legacy three-argument ajaxReturn(data, info, status).
    public function checkLogin(){
        if($_POST['username'] != 'admin'){
            $this->ajaxReturn('','用户名错误!',0);
        }else{
            $this->ajaxReturn($_POST['username'],'用户名正确~',1);
        }
    }
}
?><file_sep>/Weibo/Common/Controller/AuthController.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/14 0014
* Time: 9:40
*/
namespace Common\Controller;
use Think\Controller;
use Think\Auth;
class AuthController extends Controller{
    // Runs before every action of controllers that extend AuthController:
    // the super administrator (uid 1) bypasses all checks; everyone else is
    // verified against the Think\Auth rule named "Module/Controller/Action".
    protected function _initialize(){
        $sess_auth = session('auth');
        // uid 1 is the super administrator: grant everything, skip Auth.
        if($sess_auth['uid'] == 1){
            return true;
        }
        // Permission check against the auth rule table for this request.
        $auth = new Auth();
        if(!$auth->check(MODULE_NAME.'/'.CONTROLLER_NAME.'/'.ACTION_NAME,$sess_auth['uid'])){
            $this->error('没有权限!',U('Login/index'));
        }
    }
}<file_sep>/Weibo/Runtime/Cache/Home/86a52f73f93a1e9d7653b66f393bc156.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset="utf-8" >
<title>JqueryAjax练习</title>
<script type="text/javascript" src="/demo39/Public/Js/Jquery/jquery.js"></script>
<script type="text/javascript" src="/demo39/Public/Js/Jquery/jquery.form.js"></script>
<script language="JavaScript">
function checkName(){
$.post('/demo39/Home/Ajax/checkName',{
'username':$('#username').val()
},function(response,status,xhr){
$('#box').html(response.data+response.info).show().fadeOut(4000);
});
}
$(function(){
$('#myForm').ajaxForm({
beforeSubmit : checkForm,
success : complete,
dataType : 'json'
});
function checkForm(){
if($('#username').val() == ''){
$('#box').html('用户名不能为空!').show().fadeOut(4000);
$('#username').focus();
return false;
}
return true;
}
function complete(response,status,xhr){
$('#box').html(response.data+response.info).show().fadeOut(4000);
}
});
</script>
</head>
<body>
<form id="myForm" method="post" action="/demo39/Home/Ajax/checkForm">
<p><input type="text" name="username" id="username" />
<input type="button" value="检查账户" onclick="checkName()" />
</p>
<p><input type="password" name="password" /></p>
<p><input type="submit" value="提交" /></p>
</form>
<div id="box"></div>
<div id="msg"></div>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/5ca2f7a36dbddd45ddca0dc12cebd989.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>ThinkPHP Ajax 实现示例</title>
<script type="text/javascript" src="/demo39/Public/js/base.js"></script>
<script type="text/javascript" src="/demo39/Public/js/prototype.js"></script>
<script type="text/javascript" src="/demo39/Public/js/mootools.js"></script>
<script type="text/javascript" src="/demo39/Public/js/Ajax/ThinkAjax.js"></script>
<script type="text/javascript">
function checkTitle()
{
ThinkAjax.send('/demo39/Home/Public/checkTitle','ajax=1&title='+$('title').value,'','checkbox');
}
function checkForm(){
ThinkAjax.sendForm('myform','/demo39/Home/Public/checkForm',complete,'checkbox');
}
function complete(data,status){
if (status==1)
{
// 提示信息
$('list').innerHTML = '<span style="color:blue">'+data+'你好!</span>';
}
}
</script>
</head>
<body>
<form method="post" id="myform" >
<table>
<tbody>
<tr>
<td width="45" class="tRight">标题:</td>
<td>
<input type="text" id="title" name="title">
<input type="button" value="检查" onClick="checkTitle();">
</td>
<td>
<span id="checkbox"></span>
</td>
<td>
<div id="list"></div>
</td>
</tr>
</tbody>
</table>
</form>
</body>
</html><file_sep>/Weibo/Home/Controller/FreeController.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/21 0021
* Time: 11:05
*/
namespace Home\Controller;
use Think\Controller;
use Think\Image;
class FreeController extends Controller {
    // Image demo: open ./2.png, shrink it to fit within 150x150, and save
    // back over the same path -- the original full-size file is overwritten.
    public function index(){
        $image = new Image();
        $image->open('./2.png');
        $image->thumb(150, 150)->save('./2.png');
    }
}<file_sep>/Weibo/Common/Conf/config.php
<?php
return array(
//'配置项'=>'配置值'
/*
//PDO连接方式
'DB_TYPE' => 'pdo', // 数据库类型
'DB_USER' => 'query', // 用户名
'DB_PWD' => '<PASSWORD>', // 密码
'DB_PREFIX' => 'zt_', // 数据库表前缀
'DB_DSN' => 'mysql:host=192.168.0.101;dbname=zentao;port=3308;charset=UTF-8',
*/
//PDO连接方式
'DB_TYPE' => 'pdo', // 数据库类型
'DB_USER' => 'root', // 用户名
'DB_PWD' => '<PASSWORD>', // 密码
'DB_PREFIX' => 'think_', // 数据库表前缀
'DB_DSN' => 'mysql:host=localhost;dbname=thinkphp;port=3306;charset=UTF-8', //默认端口3306可省略
//页面调试工具
'SHOW_PAGE_TRACE' => true,
/*
//页面调试工具内容项
'TRACE_PAGE_TABS'=>array(
'base'=>'基本',
'file'=>'文件',
'think'=>'流程',
'error'=>'错误',
'sql'=>'SQL',
'debug'=>'调试',
'user'=>'用户'
),
*/
//默认视图层名称
//'DEFAULT_V_LAYER' => 'View',
//设置视图模板文件后缀
'TMPL_TEMPLATE_SUFFIX' => '.tpl',
//模板文件CONTROLLER_NAME与ACTION_NAME之间的分割符
//'TMPL_FILE_DEPR' => '_',
//设置视图层路径
//'VIEW_PATH' => './Public/',
//设置默认主题名
'DEFAULT_THEME' => 'default',
//'LAYOUT_ON' => true,
//'LAYOUT_NAME' => 'Public/layout',
/*
'TMPL_PARSE_STRING' => array(
'__PUBLIC__'=>'/Common',
'__UPLOAD__'=>'/Upload',
),
*/
// 自定义标签库(test) 驱动加载
//'TAGLIB_PRE_LOAD' => 'test',
//设置允许访问模块及默认访问模块
'MODULE_ALLOW_LIST' => array('Home','Admin'),
'DEFAULT_MODULE' => 'Home',
//配置路由
'URL_ROUTER_ON' => true,
'URL_ROUTE_RULES' => array(
//'u'=>'User/index',
//'u/:id'=>'User/index',
//'u/:aa/:bb/:cc'=>'User/index',
//':u/:id'=>'User/index',
//'u/:id\d'=>'User/index',
//'u/:id\d|md5'=>'User/index',
//'u/[:id\d]'=>'User/index',
//'u/:id$'=>'User/index',
// '/^u\/(\d{2})$/' => 'User/index?id=:1|md5'
/*
'u/:id'=>function ($id){
echo $id;
},
*/
/*
'/^u\/(\d{2})$/' =>function ($id){
echo $id;
}
*/
//'u/:id\d'=>'User/index',
//'/^u\/(\d{2})$/' => 'User/index?id=:1',
),
/*
//配置静态路由
'URL_MAP_RULES' => array(
'u/i'=>'User/index',
),
*/
//设置URL模式
//'URL_MODEL' => 0, //普通模式
//'URL_MODEL' => 1, //APTHINFO模式
'URL_MODEL' => 2, //重写模式 REWRITE
//'URL_MODEL' => 3, //兼容模式
//允许二级控制器
//'CONTROLLER_LEVEL' => 2,
//设置控制器后缀,防止关键字冲突
//'ACTION_SUFFIX' => 'Action',
//操作绑定到类 PS:当类结构复杂庞大时候使用
//'ACTION_BIND_CLASS' => true,
//设置URL中分页名 默认为'p'
//'VAR_PAGE'=>'page',
//sql语句缓存
//'DB_SQL_BUILD_CACHE'=>true,
/*
//设置静态缓存
'HTML_CACHE_ON' => true, // 开启静态缓存
'HTML_CACHE_TIME' => 60, // 全局静态缓存有效期(秒)
'HTML_FILE_SUFFIX' => '.html', // 设置静态缓存文件后缀
'HTML_CACHE_RULES' => array( // 定义静态缓存规则
// 定义格式1 数组方式
//'User:index' => array('{:module}_{:controller}_{:action}_{id}',60),
'User:index' => array('{:module}/{:controller}/{:action}/{id}',60),
// 定义格式2 字符串方式
//'User:index' => '123',
//'*' => array('{$_SERVER.REQUEST_URI|MD5}'),
)
*/
/*
//设置手动写日志
'LOG_RECORD' => true, // 开启日志记录
'LOG_LEVEL' =>'EMERG,ALERT,CRIT,ERR', // 只记录EMERG ALERT CRIT ERR 错误
*/
//邮件配置
'THINK_EMAIL' => array(
'SMTP_HOST' => 'smtp.qq.com', //SMTP服务器
'SMTP_PORT' => '465', //SMTP服务器端口
'SMTP_USER' => '<EMAIL>', //SMTP服务器用户名
'SMTP_PASS' => '<PASSWORD>#', //SMTP服务器密码
'FROM_EMAIL' => '<EMAIL>', //发件人EMAIL
'FROM_NAME' => 'cyjmmy', //发件人名称
'REPLY_EMAIL' => '', //回复EMAIL(留空则为发件人EMAIL)
'REPLY_NAME' => '', //回复名称(留空则为发件人名称)
),
);
<file_sep>/Weibo/Home/Model/ContentModel.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/14 0014
* Time: 15:06
*/
namespace Home\Model;
use Think\Model\RelationModel;
class ContentModel extends RelationModel {
    // Relation map: each content row BELONGS_TO one User via Content.uid,
    // pulling the owner's `user` column into the result set.
    protected $_link = array(
        'User'=>array(
            'mapping_type'=>self::BELONGS_TO,   // many contents -> one user
            'foreign_key'=>'uid',               // Content.uid references the user
            'mapping_fields'=>'user',           // fetch only the user name
            'as_fields'=>'user',                // expose it as a flat `user` key
        ),
    );
}<file_sep>/Weibo/Runtime/Cache/Home/d401ede22b6e70f77102ab7f0061d257.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE html>
<html>
<head lang="zh-cn">
<meta charset="UTF-8">
<title>ThinkAjax DEMO</title>
<script type="text/javascript" src="/demo40/Public/Js/Base.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/prototype.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/mootools.js"></script>
<script type="text/javascript" src="/demo40/Public/Js/Ajax/ThinkAjax.js"></script>
<script type="text/javascript">
function ajaxAccount(){
ThinkAjax.send('/demo40/Home/Tpajax/checkAccount','ajax=1&account='+$('account').value,complete,'info');
}
function ajaxForm(){
ThinkAjax.sendForm('form1','/demo40/Home/Tpajax/checkForm',complete,'info');
}
function complete(data){
$('data').innerHTML = data;
}
</script>
</head>
<body>
<form id="form1">
<input type="hidden" name="ajax" value="1" />
<p>
用户:<input type="text" name="account" id="account" />
<input type="button" value="检查" onclick="ajaxAccount()" />
</p>
<p>密码:<input type="<PASSWORD>" name="password" /></p>
<p><input type="button" value="提交" onclick="ajaxForm()" /></p>
</form>
<div id="info"></div>
<div id="data"></div>
</body>
</html><file_sep>/Weibo/Home/Controller/AjaxController.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/20 0020
* Time: 21:43
*/
namespace Home\Controller;
use Think\Controller;
class AjaxController extends Controller {
    // jQuery AJAX demo: serves the form page plus two JSON endpoints that
    // validate the posted username against the fixed value 'cheng'.
    public function index(){
        $this->display();
    }
    // Endpoint for the "check account" button ($.post from the template).
    // Replies with the data / info / status JSON envelope (1 = success).
    public function checkName(){
        if(I('username') == 'cheng'){
            $this->ajaxReturn(array('data'=>I('username'),'info'=>'用户名正确的','status'=>1));
        }else{
            $this->ajaxReturn(array('data'=>I('username'),'info'=>'用户名错误的','status'=>0));
        }
    }
    // Endpoint for the ajaxForm() submission; identical check to checkName().
    public function checkForm(){
        if(I('username') == 'cheng'){
            $this->ajaxReturn(array('data'=>I('username'),'info'=>'用户名正确的','status'=>1));
        }else{
            $this->ajaxReturn(array('data'=>I('username'),'info'=>'用户名错误的','status'=>0));
        }
    }
}<file_sep>/Weibo/Runtime/Cache/Home/981da6aba14d432146ce2053ca93b85a.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset="utf-8" />
<title>加班登记</title>
<script type="text/javascript" src="/demo39/Public/My97DatePicker/WdatePicker.js"></script>
</head>
<body>
<form method="post" action="<?php echo U('Work/note');?>">
<p>时长:<input type="text" name="time" /></p>
<p>备注:<textarea name="reason"></textarea></p>
<p>起始时间:<input type="text" name="start_time" id="d233" onFocus="WdatePicker({startDate:'%y-%M-01 00:00:00',
dateFmt:'yyyy-MM-dd HH:mm',alwaysUseStartDate:true})"/></p>
<p>结束时间:<input type="text" name="end_time" id="d233" onFocus="WdatePicker({startDate:'%y-%M-01 00:00:00',
dateFmt:'yyyy-MM-dd HH:mm',alwaysUseStartDate:true})"/></p>
<p><input type="submit" value="提交"/></p>
</form>
</body>
</html><file_sep>/Weibo/Runtime/Cache/Home/5247350de53ebc884b304edb685542d9.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title>select title</title>
<!--
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="./Common/js/index.js"></script>
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
-->
</head>
<body>
<div id="header">这里是头文件</div>
<div id="footer">这里是脚文件</div>
</body>
</html><file_sep>/Weibo/Admin/Model/UserViewModel.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/16 0016
* Time: 18:02
*/
namespace Admin\Model;
use Think\Model\ViewModel;
class UserViewModel extends ViewModel {
    // View: User RIGHT JOIN Work on User.id = Work.uid, exposing the user
    // name, the entry type, and SUM(Work.time) aliased as `sum`
    // (callers group by user to get per-user totals per type).
    protected $viewFields = array(
        'User'=>array('user','_type'=>'RIGHT','SUM(Work.time)'=>'sum'),
        'Work'=>array('type','_on'=>'User.id=Work.uid'),
    );
}<file_sep>/test2.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/3/2 0002
* Time: 9:29
*/
/**
 * Convert a URL query string into an associative array.
 *
 * Classic explode-based implementation.  Fixes over the original:
 *  - a pair without "=" maps to an empty string instead of raising an
 *    undefined-offset notice;
 *  - explode(..., 2) keeps any "=" inside the value intact (the original
 *    silently truncated the value at the second "=").
 *
 * @param string $str query string, e.g. 'user=yijun&sex=1&age=27'
 * @return array      key => value map
 */
function str2arr1 ($str)
{
    $r = array();
    foreach (explode("&", $str) as $pair)
    {
        $t = explode("=", $pair, 2);
        $r[$t[0]] = isset($t[1]) ? $t[1] : '';
    }
    return $r;
}
/**
 * Convert a delimited key/value string into an associative array.
 *
 * The original built PHP source text by string substitution and ran it
 * through eval(): the quoting was broken (the escaped eval argument never
 * produced an array), and eval() on caller-supplied data is a code
 * injection risk.  Replaced with an explicit, safe parse that honours the
 * configurable separators.
 *
 * @param string $str string to parse, e.g. 'TranAbbr=IPER&AcqSsn=0736'
 * @param string $sp  pair separator (default '&')
 * @param string $kv  key/value separator (default '=')
 * @return array      key => value map
 */
function str2arr ($str,$sp="&",$kv="=")
{
    $arr = array();
    foreach (explode($sp, $str) as $pair)
    {
        $t = explode($kv, $pair, 2);
        $arr[$t[0]] = isset($t[1]) ? $t[1] : '';
    }
    return $arr;
}
$str1 = 'user=yijun&sex=1&age=27';
$str2 = str2arr1($str1);
print_r($str2);
//$str2 = str2arr($str1);
//
//echo $str1;
//echo '<br/>';
//print_r($str2);
//
//$a = array();
//
//foreach($str2 as $key=>$val){
// $a[$key] .= $val;
//}
//
//print_r($a);<file_sep>/Weibo/Runtime/Cache/Home/f91ce1c3ef53dcfb7124bfce16ea5d93.php
<?php if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta charset='utf-8'>
<title></title>
<!--
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="./Common/js/index.js"></script>
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
<script type="text/javascript" src="/demo39/Public/js/index.js"></script>
<link rel="stylesheet" type="text/css" href="/demo39/Public/css/index.css" />
-->
</head>
<body>
<div id="header">这里是头文件</div>
<?php if(is_array($data)): $i = 0; $__LIST__ = $data;if( count($__LIST__)==0 ) : echo "" ;else: foreach($__LIST__ as $key=>$arr): $mod = ($i % 2 );++$i; echo ($arr["id"]); ?> ---- <?php echo ($arr["title"]); ?> <br/><?php endforeach; endif; else: echo "" ;endif; ?>
<div id="footer">这里是脚文件</div>
</body>
</html><file_sep>/Weibo/Home/Controller/TpajaxController.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/3/15 0015
* Time: 10:34
*/
namespace Home\Controller;
use Think\Controller;
class TpajaxController extends Controller {
public function checkAccount(){
if(IS_AJAX){
$account = $_POST['account'];
if($account == 'admin'){
$data = array();
$data['data'] = $account;
$data['info'] = '用户名正确'.$account;
$data['status'] = 1;
$this->ajaxReturn($data);
}else{
$data = array();
$data['data'] = $account;
$data['info'] = '用户名错误'.$account;
$data['status'] = 0;
$this->ajaxReturn($data);
}
}
$this->display();
}
public function checkForm(){
if(IS_AJAX){
$account = $_POST['account'];
$password = $_POST['password'];
if($account == 'admin'){
$data = array();
$data['data'] = $account.' - '.$password;
$data['info'] = '表单ajax提交成功';
$data['status'] = 1;
$this->ajaxReturn($data);
}else{
$data = array();
$data['data'] = $account.' - '.$password;
$data['info'] = '表单ajax提交失败';
$data['status'] = 0;
$this->ajaxReturn($data);
}
}
$this->display('checkAccount');
}
}<file_sep>/Weibo/Home/Controller/IndexController.class.php
<?php
// 本类由系统自动生成,仅供测试用途
namespace Home\Controller;
use Think\Controller;
header("Content-type:text/html;charset=UTF-8");
class IndexController extends Controller {
public function index() {
$sess_user = session('sess_user');
if($sess_user){
$this->assign('sess_user',$sess_user);
$this->display();
}else{
$this->error('非法操作!',U('Login/login'));
}
}
}<file_sep>/Weibo/Admin/Controller/UserController.class.php
<?php
/**
* Created by PhpStorm.
* User: 程义军
* Date: 2016/2/16 0016
* Time: 17:11
*/
namespace Admin\Controller;
use Think\Controller;
header("Content-type:text/html;charset=UTF-8");
class UserController extends Controller {
public function anal_list(){
$type = I('get.type');
$user = D('UserView');
$list = $user->where(array('type'=>$type))->group('user')->select();
$this->assign('list',$list);
$this->display();
}
}<file_sep>/Weibo/Runtime/Cache/Home/fa4d37af88e2ba40a802bbc36495acf7.php
<?php /* ThinkPHP compiled template cache (auto-generated). */ if (!defined('THINK_PATH')) exit();?><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
 <head>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
  <title>ThinkPHP Ajax 实现示例</title>
  <script type="text/javascript" src="/demo39/Public/Js/Base.js"></script>
  <script type="text/javascript" src="/demo39/Public/Js/prototype.js"></script>
  <script type="text/javascript" src="/demo39/Public/Js/mootools.js"></script>
  <script type="text/javascript" src="/demo39/Public/Js/Ajax/ThinkAjax.js"></script>
  <script language="JavaScript">
	// Submit form1 via ThinkAjax; status/data arrive in complete(),
	// transient feedback is rendered into #result.
	function check(){
		ThinkAjax.sendForm('form1','http://localhost:8070/demo39/Home/Public/checkLogin',complete,'result');
	}
	function complete(data,status){
		if (status==1){
			$('list').innerHTML = '<span style="color:blue">'+data+'你好!</span>';
		}
	}
  </script>
 </head>
 <body>
  <div id="result"></div>
  <div id="list"></div>
  <!-- fixed: a stray duplicated quote after method="post" made the attribute markup invalid -->
  <form name="login" id="form1" method="post">
   用户名: <input type="text" name="username" /><br />
   <input type="button" onClick="check()" value="提 交" />
  </form>
 </body>
</html><file_sep>/test.php
<?php
/**
 * Minimal {placeholder} template engine demo.
 *
 * iTemplate defines the contract; Template performs plain string
 * substitution: every stored variable name, wrapped in braces, is
 * replaced by its value inside the supplied template string.
 */
header("Content-type:text/html;charset=UTF-8");

interface iTemplate {
    const MAX_SIZE = 5;
    public function setVar($name,$var);
    public function getHtml($template);
}

class Template implements iTemplate {
    // name => replacement value, in insertion order
    private $vars = array();

    /** Register (or overwrite) one template variable. */
    public function setVar($name,$var){
        $this->vars[$name] = $var;
    }

    /**
     * Substitute every registered variable into $template.
     * str_replace with parallel arrays applies the pairs sequentially,
     * exactly like the equivalent foreach loop would.
     */
    public function getHtml($template){
        $search = array();
        foreach (array_keys($this->vars) as $name) {
            $search[] = '{'.$name.'}';
        }
        return str_replace($search, array_values($this->vars), $template);
    }
}

// Demo: '{$name}' is a literal placeholder (single quotes), not interpolation.
$tpl = new Template();
$tpl->setVar('$name','我');
$tplstr = '{$name}是中国人';
echo $tpl->getHtml($tplstr);
<file_sep>/Weibo/Home/Controller/LoginController.class.php
<?php
/**
* Created by PhpStorm.
* User: PCAUTOSERVE
* Date: 2016/2/15 0015
* Time: 9:15
*/
namespace Home\Controller;
use Think\Controller;
use Think\Verify;
header("Content-type:text/html;charset=UTF-8");
class LoginController extends Controller {
public function login(){
if(IS_POST){
$code = check_verify(I('post.code'),1);
if($code){
$user = A('User','Controller');
$user->login();
}else{
$this->error('验证码不正确,请重新输入!',U('Login/login'));
}
}else{
$this->assign('sess_user','游客');
$this->display();
}
}
public function logout(){
session('[destroy]');
$this->success('退出成功!跳转登陆页面...',U('Login/login'));
}
public function verify_code(){
$verify = new Verify();
$verify->length = 4;
// 设置验证码字符为纯数字
$verify->codeSet = '0123456789';
$verify->entry(1);
}
} | c96dc10fb9861b594162a5707d4ba678c6afc49a | [
"SQL",
"PHP"
] | 40 | PHP | chengyijun/demo40 | f7038f93064e57c006b3990c02456f651d3dcf59 | 10bc0a5c57e62952dce5cb4cc6ccedd4e5031fd2 |
refs/heads/master | <file_sep>
$(function()
{
$('#reused_form').submit(function(e)
{
e.preventDefault();
$form = $(this);
$('button[type="submit"]', $form).each(function()
{
$btn = $(this);
$btn.prop('type','button' );
$btn.prop('orig_label',$btn.text());
$btn.text('Message sent');
});
});
});
| 62ed411ad4ece1cbbe33de2817d89fe390d9bb81 | [
"JavaScript"
] | 1 | JavaScript | ranjithm2001/portfolio | 75c4aad3a810181203792bbfff6383abff28d5de | 3228e846fec40504c1c50d338f41c58040843259 |
refs/heads/master | <repo_name>hwdavr/Lottery-React<file_sep>/src/web3.js
// import web3 to replace the default web3 injected by MetaMask < 1.0.0
import Web3 from 'web3';
const web3 = new Web3(window.web3.currentProvider);
export default web3; | 247f171e26ef84dfbd7740811d7ad696c37e6e0f | [
"JavaScript"
] | 1 | JavaScript | hwdavr/Lottery-React | 29846bc677e95c40286e40a4d7f539b8a35bef1c | 83b4a9a0f4848c90c98396c6bb8fec36b1b55743 |
refs/heads/master | <repo_name>ppap75/WebTest<file_sep>/WebTestGit/Controllers/BPController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace WebTestGit.Controllers
{
    /// <summary>
    /// MVC controller for the "BP" section; currently only serves the
    /// default index view.
    /// </summary>
    public class BPController : Controller
    {
        // GET: BP — renders the default Index view with no model.
        public ActionResult Index()
        {
            return View();
        }
    }
} | 771cdefa00605ea614e9b8c796238b7c3f0b1b2c | [
"C#"
] | 1 | C# | ppap75/WebTest | 12aa9da4b72ea4720b9d6e0fe16796989d79b23e | 3d30925ac9ddcd9008b993811a7fe599323e93b8 |
refs/heads/master | <repo_name>Xlorum/ZHKH<file_sep>/app/src/main/java/com/deathmanwowgmail/zhkh/Programm.java
package com.deathmanwowgmail.zhkh;
import java.sql.*;
// Application entry point: boots the embedded HTTP server
// (HttpServer.mainHTTP()). The JDBC constants below are not referenced in
// this class; they appear intended for a MySQL connection to the "spb"
// schema elsewhere.
public class Programm {
    // NOTE(review): database credentials hard-coded in source (and
    // "<PASSWORD>" looks like an unfilled placeholder) — move to
    // configuration before real use. Field name "PUSSWORD" is misspelled;
    // renaming would touch any external users, so it is only flagged here.
    public static final String URL="jdbc:mysql://localhost:3306/spb";
    public static final String USERNAME="root";
    public static final String PUSSWORD=".<PASSWORD>.";
    // Starts the HTTP server; any failure is printed and swallowed so the
    // JVM exits normally instead of propagating the Throwable.
    static public void main(String[] arg) throws Exception{
        try {
            HttpServer.mainHTTP();
        } catch (Throwable throwable) {
            throwable.printStackTrace();
        }
    }
}<file_sep>/app/src/main/java/com/deathmanwowgmail/zhkh/Main.java
package com.deathmanwowgmail.zhkh;

/**
 * Placeholder launcher — intentionally does nothing yet.
 */
public class Main {
    /** No-op entry point. */
    public static void main(String[] args) {
    }
}
<file_sep>/app/src/main/java/com/deathmanwowgmail/zhkh/test.java
package com.deathmanwowgmail.zhkh;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.Socket;
import java.net.UnknownHostException;
/**
 * Demo screen that sends the typed text to a TCP server as a single
 * UTF string when the button is pressed.
 */
public class test extends AppCompatActivity {

    /** Server endpoint, kept in one place instead of repeated inline literals. */
    private static final String SERVER_IP = "192.168.127.12";
    private static final int SERVER_PORT = 8087;

    private EditText textField;
    private Button button;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test);

        textField = (EditText) findViewById(R.id.textout);
        button = (Button) findViewById(R.id.send);

        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final String message = textField.getText().toString();
                textField.setText("");
                // FIX: the socket work used to run directly in the click
                // handler, i.e. on the UI thread (NetworkOnMainThreadException
                // on modern Android, UI freeze on older versions). Send from
                // a worker thread instead.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        sendMessage(message);
                    }
                }).start();
            }
        });
    }

    /**
     * Opens a socket to the configured server, writes one UTF string and
     * closes the connection. Errors are logged; the socket (and with it its
     * streams) is always closed — the original leaked both.
     */
    private void sendMessage(String message) {
        Socket socket = null;
        try {
            socket = new Socket(SERVER_IP, SERVER_PORT);
            socket.setSoTimeout(100);
            DataOutputStream out = new DataOutputStream(socket.getOutputStream());
            out.writeUTF(message);
            out.flush();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        } catch (IOException e) {
            System.out.println("Got an IOException: " + e.getMessage());
        } finally {
            if (socket != null) {
                try {
                    socket.close();
                } catch (IOException ignored) {
                    // best-effort cleanup
                }
            }
        }
    }
}
| 4999375cc6916bd2dfd9c3361aacbd8ac5ff892e | [
"Java"
] | 3 | Java | Xlorum/ZHKH | 7edde95e9d4c4502327e04d5662b98eae5cd2af3 | c2d3a6bbeb4eed7200b9a981e8b6b684e909f80c |
refs/heads/master | <file_sep># Private Blockchain with RESTFUL API
This project is a private blockchain that stores data locally using LevelDB, integrated with a RESTful API to GET and POST blocks.
## Getting Started
These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. See deployment for notes on how to deploy the project on a live system.
### Prerequisites
Installing Node.js and npm is straightforward using the installer package available from the Node.js® web site:
```
https://nodejs.org/en/
```
### Installing
1. Clone the repository to your local computer.
2. Open the terminal and install the packages: `npm install`.
3. Run your application `node app.js`
4. Go to your browser and type: `http://localhost:8000/block`
5. Function initializeMockData() will add 10 blocks to Blockchain for testing.
## Running the tests
Use Postman or curl to send GET requests to the blockchain, appending the block height to the URL.
```
http://localhost:8000/block/[blockheight]
```
Example URL path:
http://localhost:8000/block/0, where '0' is the block height.
## Example GET Response
For URL, http://localhost:8000/block/0
```
X-Powered-By →Express
Content-Type →text/plain; charset=utf-8
Data →{"hash":"e4d04d5522c0a2d777695e8b374211fae3bf4f270f45924ce635682bb1b87e35","height":0,"body":"Test Block","time":"1541273025","previousBlockHash":""}
Connection →close
Content-Length →208
ETag →W/"d0-Nam5HnrdK6hqMyz5HW9XEqdxgVs"
Date →Sat, 03 Nov 2018 19:23:45 GMT
```
## Example POST Response
Example URL path:
http://localhost:8000/block/(Mydata) where (Mydata) is the block data.
Example For URL, http://localhost:8000/block/Foo
```
X-Powered-By →Express
Content-Type →text/plain; charset=utf-8
Data →{"hash":"5a4cfcb0eeb4ea09eeba722fec4fa8795cb1b8aef3c855f92f59eef4ee956a4e","height":21,"body":"Foo","time":"1541275030","previousBlockHash":"e4d04d5522c0a2d777695e8b374211fae3bf4f270f45924ce635682bb1b87e35"}
Connection →close
Content-Length →207
ETag →W/"cf-1dfORKZcSoeamE44HYEj6a4iUPs"
Date →Sat, 03 Nov 2018 19:57:11 GMT
```
## Built With
- [ExpressJs](https://expressjs.com) - The web framework used.
- [LevelDB](http://leveldb.org/) - Database.
- [Crypto-js](https://www.npmjs.com/package/crypto-js) - Used to hash blocks with SHA256.
<file_sep>/* ===== SHA256 with Crypto-js ===============================
| Learn more: Crypto-js: https://github.com/brix/crypto-js |
| =========================================================*/
const BlockClass = require("./Block.js");
const BlockchainClass = require("./Blockchain.js");
/* ===== Blockchain Class ==========================
| Class with a constructor for new blockchain |
| ================================================*/
// REST controller: registers GET /block/:index and POST /block/:data on the
// supplied Express app, backed by the LevelDB-based Blockchain class.
class BlockchainController {
    /**
     * Wire every endpoint onto the given Express app and seed test data.
     * @param {*} app Express application instance
     */
    constructor(app) {
        this.app = app;
        this.chain = new BlockchainClass.Blockchain();
        this.getBlockByIndex();
        this.postNewBlock();
        this.initializeMockData();
    }

    // GET /block/:index — return the block at the requested height, or 404.
    getBlockByIndex() {
        this.app.get("/block/:index", (req, res) => {
            this.chain.getBlockHeight().then(height => {
                let count = JSON.parse(height);
                // NOTE(review): req.params.index is a string; ">=" coerces it.
                // Also, "count >= index" lets index == count through — whether
                // that is one past the last block depends on getBlockHeight()'s
                // convention (max index vs. block count); confirm off-by-one.
                let index = req.params.index;
                if (count >= index) {
                    this.chain.getBlock(index).then(block => {
                        // NOTE(review): res.set(200) passes a number where
                        // Express expects a header name — likely a no-op;
                        // res.status(200) below is what sets the code.
                        res.set(200);
                        res.set("Content-Type", "text/plain");
                        res.set("Data", block);
                        res.set("Connection", "close");
                        res.status(200).send(block);
                    });
                } else {
                    res.status(404).send("Block Not Found!");
                }
            });
        });
    }

    // POST /block/:data — add a new block whose body is the URL segment.
    // (Carrying the payload in the path rather than the request body is a
    // design quirk inherited from the project's README examples.)
    postNewBlock() {
        let self = this;
        return this.app.post("/block/:data", (req, res) => {
            // Add your code here
            let body = req.params.data;
            if (body === "") {
                res.status(415).send("Block Body is empty");
            } else {
                let newblock = new BlockClass.Block(body);
                self.chain
                    .addBlock(newblock)
                    .then(block => {
                        res.set(200);
                        res.set("Content-Type", "text/plain");
                        res.set("Data", JSON.stringify(block));
                        res.set("Connection", "close");
                        res.status(200).send(JSON.stringify(block));
                    })
                    .catch(err => {
                        res.status(415).send("Something went wrong");
                        console.log(err);
                    });
            }
        });
    }

    // Seed 10 test blocks, one every 10 seconds, but only when the chain is
    // still empty (height 0) so restarts don't duplicate the mock data.
    initializeMockData() {
        let self = this;
        return this.chain.getBlockHeight().then(height => {
            if (height === 0) {
                (function theLoop(i) {
                    setTimeout(function() {
                        let blockAux = new BlockClass.Block(`Test Data #${i}`);
                        self.chain.addBlock(blockAux);
                        i++;
                        if (i < 10) theLoop(i);
                    }, 10000);
                })(0);
            }
        });
    }
}
/**
* Exporting the BlockController class
* @param {*} app
*/
module.exports = app => {
return new BlockchainController(app);
};
| fa301ba2bba88d881c49eab096a4c6fcf6e1a75c | [
"Markdown",
"JavaScript"
] | 2 | Markdown | lalghoul/privateBlockchain | f4ee0195c6770c0470f172ec4e55fabee8f6175a | 0898655f510b5ccf6ea77d6442cae0bb4f6275aa |
refs/heads/master | <repo_name>mrahmedmamdouh/E7gzlyKoraa<file_sep>/LoginActivity.java
package com.example.android.e7gzlykora;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.fasterxml.jackson.databind.deser.Deserializers;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.ValueEventListener;
/**
* Created by Kevin on 2-Mar-17.
*/
/**
 * Login screen: collects a mobile number, validates it, and routes to the
 * SMS verification step. Also links back to the identity chooser and on to
 * registration.
 */
public class LoginActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);

        final EditText mobileInput = (EditText) findViewById(R.id.editMobilePhone);
        Button loginButton = (Button) findViewById(R.id.buttonLoginUser);
        Button backButton = (Button) findViewById(R.id.buttonBackLogin);
        TextView signUpLink = (TextView) findViewById(R.id.linkToSignUp);

        loginButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String mobile = mobileInput.getText().toString().trim();
                // Reject blank or too-short numbers before moving on.
                if (mobile.isEmpty() || mobile.length() < 10) {
                    mobileInput.setError("Enter a valid mobile");
                    mobileInput.requestFocus();
                    return;
                }
                Intent verify = new Intent(LoginActivity.this, verifynumber.class);
                verify.putExtra("mobile", mobile);
                startActivity(verify);
            }
        });

        backButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(LoginActivity.this, identity.class));
            }
        });

        signUpLink.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(LoginActivity.this, Register.class));
            }
        });
    }
}
<file_sep>/MainActivity.java
package com.example.android.e7gzlykora;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.Toast;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
// Owner registration screen: collects pitch-owner details and persists them
// under the "owners" node in Firebase Realtime Database.
public class MainActivity extends AppCompatActivity {
    private static final String TAG = MainActivity.class.getSimpleName();
    // Live Firebase handles, initialised in onCreate().
    private DatabaseReference mFirebaseDatabase;
    private FirebaseDatabase mFirebaseInstance;
    // Push key of the record being edited; empty means "not created yet".
    private String ownerId;
    // NOTE(review): the widget fields a–g and `value` below are never
    // referenced by any method in this class — onCreate() declares locals
    // with the same names — so they look like dead state.
    EditText a;
    EditText b;
    EditText c;
    EditText d;
    EditText e;
    Spinner f;
    Spinner g;
    Object value;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final EditText a = (EditText) findViewById(R.id.editEmail);
final EditText b = (EditText) findViewById(R.id.editMobilePhone);
final EditText c = (EditText) findViewById(R.id.editEmail1);
final EditText d = (EditText) findViewById(R.id.editEmail2);
final EditText e = (EditText) findViewById(R.id.editEmail3);
Button save = (Button) findViewById(R.id.save);
final Spinner f = (Spinner) findViewById(R.id.spinner);
final Spinner g = (Spinner) findViewById(R.id.spinner2);
final String mobile = b.getText().toString();
String[] items = new String[]{"Cairo","Giza", "Alexandria","Others"};
// Create an ArrayAdapter using the string array and a default spinner layout
ArrayAdapter<String> adapter = new ArrayAdapter<String>(MainActivity.this, android.R.layout.simple_spinner_item, items);
// Specify the layout to use when the list of choices appears
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// Apply the adapter to the spinner
f.setAdapter(adapter);
f.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView <?> parent, View view,
int position, long id) {
if (f.getSelectedItem().equals("Cairo")) {
ArrayAdapter adapter2 = ArrayAdapter.createFromResource(MainActivity.this,
R.array.cairo, android.R.layout.simple_spinner_item);
g.setAdapter(adapter2);
} else if (f.getSelectedItem().equals("Giza")) {
ArrayAdapter adapter3 = ArrayAdapter.createFromResource(MainActivity.this,
R.array.Giza, android.R.layout.simple_spinner_item);
g.setAdapter(adapter3);
} else if (f.getSelectedItem().equals("Alexandria")) {
ArrayAdapter adapter4 = ArrayAdapter.createFromResource(MainActivity.this,
R.array.Alex, android.R.layout.simple_spinner_item);
g.setAdapter(adapter4);
} else {
ArrayAdapter adapter5 = ArrayAdapter.createFromResource(MainActivity.this,
R.array.Others, android.R.layout.simple_spinner_item);
g.setAdapter(adapter5);
}
}
@Override
public void onNothingSelected(AdapterView <?> parent) {
// TODO Auto-generated method stub
}
});
mFirebaseInstance = FirebaseDatabase.getInstance();
// get reference to 'users' node
mFirebaseDatabase = mFirebaseInstance.getReference("owners");
mFirebaseDatabase.push().setValue(ownerId);
// store app title to 'app_title' node
mFirebaseInstance.getReference("E7gzlykora").setValue("Realtime Database");
save.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent1 = new Intent(MainActivity.this, com.example.android.e7gzlykora.Loginowner.class);
intent1.putExtra("mobile",mobile);
startActivity(intent1);
Toast.makeText(MainActivity.this,mobile,Toast.LENGTH_LONG).show();
final String zone1 = f.getSelectedItem().toString();
final String zone2 = g.getSelectedItem().toString();
final String name = a.getText().toString();
final String mobile = b.getText().toString();
final String fieldname = c.getText().toString();
final String address = d.getText().toString();
final String cost = e.getText().toString();
// Check for already existed userId
if (TextUtils.isEmpty(ownerId)) {
createUser(name, mobile, fieldname, address, cost, zone1, zone2);
} else {
updateUser(name, mobile, fieldname, address, cost, zone1, zone2);
}
}
});
return;
}
    /**
     * First-time save: allocates a push key for this owner, writes the whole
     * record under "owners/&lt;ownerId&gt;", then attaches the change listener.
     */
    private void createUser(String name, String mobile, String fieldname, String address, String cost, String zone1, String zone2) {
        // TODO
        // In real apps this userId should be fetched
        // by implementing firebase auth
        if (TextUtils.isEmpty(ownerId)) {
            ownerId = mFirebaseDatabase.push().getKey();
        }
        owner owner = new owner(name, mobile, fieldname, address, cost, zone1, zone2);
        mFirebaseDatabase.child(ownerId).setValue(owner);
        // NOTE(review): setValue(owner) above already persists every field;
        // the per-child writes below store the same data a second time under
        // differently-cased keys ("Field Name", "Zone1", ...), so the node
        // ends up with two copies of each value. Confirm which schema the
        // readers expect before removing either set of writes.
        if (!TextUtils.isEmpty(name))
            mFirebaseDatabase.child(ownerId).child("name").setValue(name);
        if (!TextUtils.isEmpty(mobile))
            mFirebaseDatabase.child(ownerId).child("mobile").setValue(mobile);
        if (!TextUtils.isEmpty(fieldname))
            mFirebaseDatabase.child(ownerId).child("Field Name").setValue(fieldname);
        if (!TextUtils.isEmpty(address))
            mFirebaseDatabase.child(ownerId).child("Address").setValue(address);
        if (!TextUtils.isEmpty(cost))
            mFirebaseDatabase.child(ownerId).child("Cost").setValue(cost);
        if (!TextUtils.isEmpty(zone1))
            mFirebaseDatabase.child(ownerId).child("Zone1").setValue(zone1);
        if (!TextUtils.isEmpty(zone2))
            mFirebaseDatabase.child(ownerId).child("Zone2").setValue(zone2);
        addUserChangeListener();
    }
/**
* User data change listener
*/
private void addUserChangeListener() {
// User data change listener
mFirebaseDatabase.child(ownerId).addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
owner owner = dataSnapshot.getValue(owner.class);
// Check for null
if (owner == null) {
Log.e(TAG, "owner data is null!");
return;
}
Log.e(TAG, "owner data is changed!" + owner.name + ", " + owner.mobile+ ", " + owner.fieldname+ ", " + owner.address+ ", " + owner.cost+ ", " + owner.zone1+ ", " + owner.zone2);
}
@Override
public void onCancelled(DatabaseError error) {
// Failed to read value
Log.e(TAG, "Failed to read user", error.toException());
}
});
}
    /**
     * Subsequent save: rewrites the non-empty fields of the current owner
     * node, then scans all owners by mobile number.
     */
    private void updateUser(final String name, final String mobile, final String fieldname, final String address, final String cost, final String zone1, final String zone2) {
        // updating the user via child nodes
        if (!TextUtils.isEmpty(name))
            mFirebaseDatabase.child(ownerId).child("name").setValue(name);
        if (!TextUtils.isEmpty(mobile))
            mFirebaseDatabase.child(ownerId).child("mobile").setValue(mobile);
        if (!TextUtils.isEmpty(fieldname))
            mFirebaseDatabase.child(ownerId).child("Field Name").setValue(fieldname);
        if (!TextUtils.isEmpty(address))
            mFirebaseDatabase.child(ownerId).child("Address").setValue(address);
        if (!TextUtils.isEmpty(cost))
            mFirebaseDatabase.child(ownerId).child("Cost").setValue(cost);
        if (!TextUtils.isEmpty(zone1))
            mFirebaseDatabase.child(ownerId).child("Zone1").setValue(zone1);
        if (!TextUtils.isEmpty(zone2))
            mFirebaseDatabase.child(ownerId).child("Zone2").setValue(zone2);
        mFirebaseDatabase.addListenerForSingleValueEvent(new ValueEventListener() {
            @Override
            public void onDataChange(final DataSnapshot dataSnapshot) {
                for (DataSnapshot data : dataSnapshot.getChildren()) {
                    //If email exists then toast shows else store the data on new key
                    // NOTE(review): this pushes a brand-new owner record for
                    // EVERY existing child whose mobile differs, so a single
                    // update can create many duplicates. The intent looks like
                    // "insert only if no child matches" — confirm and rework
                    // before relying on this path.
                    if (!data.getValue(owner.class).getMobile().equals(mobile)) {
                        mFirebaseDatabase.child(mFirebaseDatabase.push().getKey()).setValue(new owner(name, mobile, fieldname, address, cost, zone1, zone2));
                    } else {
                        Toast.makeText(MainActivity.this, "Mobile Number Already exists.", Toast.LENGTH_SHORT).show();
                    }
                }
            }
            @Override
            public void onCancelled(final DatabaseError databaseError) {
                // Read failure: silently ignored (no retry, no user feedback).
            }
        });
    }
}<file_sep>/Searcharabic.java
package com.example.android.e7gzlykora;
import android.support.v7.app.AppCompatActivity;
// Placeholder for the Arabic-language search screen — no behaviour yet.
public class Searcharabic extends AppCompatActivity {
}
<file_sep>/searchActivity.java
package com.example.android.e7gzlykora;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.DatePickerDialog;
import android.app.assist.AssistStructure;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.icu.util.Calendar;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.RequiresApi;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CalendarView;
import android.widget.CheckBox;
import android.widget.DatePicker;
import android.widget.Spinner;
import android.widget.Toast;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
// Search screen: the user picks an area/zone pair, a time window, optional
// single/weekly booking checkboxes and a date, then the criteria are saved
// under the "users" node and the results activity is launched.
public class searchActivity extends AppCompatActivity {
    private static final String TAG = searchActivity.class.getSimpleName();
    // Callback invoked by the DatePickerDialog opened from the calendar view.
    DatePickerDialog.OnDateSetListener mDateSetListener = null;
    // NOTE(review): mobile, name and single are never read or written by any
    // method in this class — they look like dead state.
    String mobile;
    String name;
    private DatabaseReference mFirebaseDatabase;
    // Push key of the saved search; empty means "not created yet".
    private String UserId;
    private CheckBox single;
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setContentView(R.layout.activity_search);
        final Spinner zone1 = (Spinner) findViewById(R.id.spinner);
        final Spinner zone2 = (Spinner) findViewById(R.id.spinner2);
        String[] items = new String[]{"Cairo", "Giza", "Alexandria", "Others"};
        // Create an ArrayAdapter using the string array and a default spinner layout
        ArrayAdapter <String> adapter = new ArrayAdapter <String>(searchActivity.this, android.R.layout.simple_spinner_item, items);
        // Specify the layout to use when the list of choices appears
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        // Apply the adapter to the spinner
        zone1.setAdapter(adapter);
        // Re-populate the district spinner whenever the governorate changes.
        zone1.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView <?> parent, View view,
                                       int position, long id) {
                if (zone1.getSelectedItem().equals("Cairo")) {
                    ArrayAdapter adapter2 = ArrayAdapter.createFromResource(searchActivity.this,
                            R.array.cairo, android.R.layout.simple_spinner_item);
                    zone2.setAdapter(adapter2);
                } else if (zone1.getSelectedItem().equals("Giza")) {
                    ArrayAdapter adapter3 = ArrayAdapter.createFromResource(searchActivity.this,
                            R.array.Giza, android.R.layout.simple_spinner_item);
                    zone2.setAdapter(adapter3);
                } else if (zone1.getSelectedItem().equals("Alexandria")) {
                    ArrayAdapter adapter4 = ArrayAdapter.createFromResource(searchActivity.this,
                            R.array.Alex, android.R.layout.simple_spinner_item);
                    zone2.setAdapter(adapter4);
                } else {
                    ArrayAdapter adapter5 = ArrayAdapter.createFromResource(searchActivity.this,
                            R.array.Others, android.R.layout.simple_spinner_item);
                    zone2.setAdapter(adapter5);
                }
            }
            @Override
            public void onNothingSelected(AdapterView <?> parent) {
                // TODO Auto-generated method stub
            }
        });
        // "From" and "to" time spinners share the same R.array.Time entries.
        final Spinner time1 = (Spinner) findViewById(R.id.spinner3);
        final Spinner time2 = (Spinner) findViewById(R.id.spinner4);
        Button search = (Button) findViewById(R.id.search);
        // Create an ArrayAdapter using the string array and a default spinner layout
        ArrayAdapter <CharSequence> adapter3 = ArrayAdapter
                .createFromResource(this, R.array.Time,
                        android.R.layout.simple_spinner_item);
        // Specify the layout to use when the list of choices appears
        adapter3.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        // Apply the adapter to the spinner
        time1.setAdapter(adapter3);
        ArrayAdapter <CharSequence> adapter1 = ArrayAdapter
                .createFromResource(this, R.array.Time,
                        android.R.layout.simple_spinner_item);
        // Specify the layout to use when the list of choices appears
        adapter1.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        // Apply the adapter to the spinner
        time2.setAdapter(adapter1);
        // NOTE(review): both checkbox handlers read the label into a local
        // and then discard it — they currently have no effect.
        final CheckBox single = (CheckBox) findViewById(R.id.checkbox1);
        final CheckBox weekly = (CheckBox) findViewById(R.id.checkbox2);
        single.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (single.isChecked()) {
                    String singletime = single.getText().toString();
                }
            }
        });
        weekly.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (weekly.isChecked()) {
                    String weeklytime = weekly.getText().toString();
                }
            }
        });
        final CalendarView calendar = (CalendarView) findViewById(R.id.calendar);
        // NOTE(review): CalendarView consumes touch events itself, so a plain
        // OnClickListener may never fire — verify this dialog actually opens.
        calendar.setOnClickListener(new View.OnClickListener() {
            @RequiresApi(api = Build.VERSION_CODES.N)
            @Override
            public void onClick(View v) {
                Calendar cal = Calendar.getInstance();
                int year = cal.get(Calendar.YEAR);
                int month = cal.get(Calendar.MONTH);
                int day = cal.get(Calendar.DAY_OF_MONTH);
                DatePickerDialog dialog = new DatePickerDialog(
                        searchActivity.this,
                        android.R.style.Theme_Holo_Light_Dialog_MinWidth,
                        mDateSetListener,
                        year, month, day);
                dialog.getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
                dialog.show();
            }
        });
        mDateSetListener = new DatePickerDialog.OnDateSetListener() {
            @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public void onDateSet(DatePicker datePicker, int year, int month, int day) {
                month = month + 1;
                Log.d(TAG, "onDateSet: mm/dd/yyy: " + month + "/" + day + "/" + year);
                String x = month + "/" + day + "/" + year;
                // NOTE(review): x is "M/d/yyyy", not a number —
                // Long.parseLong(x) will throw NumberFormatException here.
                // setDate expects epoch millis; this needs a real conversion.
                calendar.setDate(Long.parseLong(x));
            }
        };
        FirebaseDatabase mFirebaseInstance = FirebaseDatabase.getInstance();
        // get reference to 'users' node
        mFirebaseDatabase = mFirebaseInstance.getReference("users");
        // store app title to 'app_title' node
        mFirebaseInstance.getReference("E7gzlykora").setValue("Realtime Database");
        search.setOnClickListener(new View.OnClickListener() {
            @RequiresApi(api = Build.VERSION_CODES.N)
            @Override
            public void onClick(View view) {
                // Launch the results list first, then persist the criteria.
                Intent intent1 = new Intent(searchActivity.this, com.example.android.e7gzlykora.prospectowner_listview.class);
                startActivity(intent1);
                final String fromtime = time1.getSelectedItem().toString();
                final String totime = time2.getSelectedItem().toString();
                final String zone3 = zone1.getSelectedItem().toString();
                final String zone4 = zone2.getSelectedItem().toString();
                String x = String.valueOf(calendar.getDate());
                // Check for already existed userId
                // NOTE(review): the else-branch is empty — repeated searches
                // after the first one are silently dropped.
                if (TextUtils.isEmpty(UserId)) {
                    createUser(fromtime, totime, zone3, zone4, x);
                } else {
                }
            }
        });
        return;
    }
    /**
     * Persists one search record under "users/&lt;UserId&gt;" (allocating the
     * push key on first use) and attaches the change listener.
     */
    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    private void createUser(String fromtime, String totime, String zone3, String zone4, String x) {
        // TODO
        // In real apps this userId should be fetched
        // by implementing firebase auth
        if (TextUtils.isEmpty(UserId)) {
            UserId = mFirebaseDatabase.push().getKey();
        }
        if (!TextUtils.isEmpty(fromtime))
            mFirebaseDatabase.child(UserId).child("from").setValue(fromtime);
        if (!TextUtils.isEmpty(totime))
            mFirebaseDatabase.child(UserId).child("to").setValue(totime);
        if (!TextUtils.isEmpty(zone3))
            mFirebaseDatabase.child(UserId).child("Area").setValue(zone3);
        if (!TextUtils.isEmpty(zone4))
            mFirebaseDatabase.child(UserId).child("Zone").setValue(zone4);
        mFirebaseDatabase.child(UserId).child("Date").setValue(x);
        addUserChangeListener();
    }
    /**
     * Diagnostic listener: logs every change to the saved search record.
     */
    private void addUserChangeListener() {
        // User data change listener
        mFirebaseDatabase.child(UserId).addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot dataSnapshot) {
                User user = dataSnapshot.getValue(User.class);
                // Check for null
                if (user == null) {
                    Log.e(TAG, "user data is null!");
                    return;
                }
                Log.e(TAG, "user data is changed!" + user.fromtime + ", " + user.totime + ", " + user.zone3 + ", " + user.zone4 + ", " + user.x);
            }
            @Override
            public void onCancelled(DatabaseError error) {
                // Failed to read value
                Log.e(TAG, "Failed to read user", error.toException());
            }
        });
    }
}
<file_sep>/prospectowner_listview.java
package com.example.android.e7gzlykora;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.widget.TextView;
import com.google.firebase.database.ChildEventListener;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * Screen that shows every stored {@code owner} entry in a RecyclerView,
 * refreshed by a value-event listener on the Firebase "owners" node.
 */
public class prospectowner_listview extends AppCompatActivity {
    RecyclerView list;
    ArrayList<owner> ownerlist = new ArrayList<>();
    private DatabaseReference mFirebaseDatabase;
    private FirebaseDatabase mFirebaseInstance;
    owner owner;
    customAdapter adapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.prospectowner_listview);

        // Wire the RecyclerView up with a vertical linear layout and the adapter.
        list = findViewById(R.id.list);
        LinearLayoutManager layoutManager = new LinearLayoutManager(this);
        list.setLayoutManager(layoutManager);
        list.setHasFixedSize(true);
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        adapter = new customAdapter(prospectowner_listview.this, ownerlist);
        list.setAdapter(adapter);

        // Subscribe to the "owners" node; every change re-reads all children.
        mFirebaseInstance = FirebaseDatabase.getInstance();
        mFirebaseDatabase = mFirebaseInstance.getReference("owners");
        mFirebaseDatabase.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                ArrayList<owner> fetched = new ArrayList<>();
                for (DataSnapshot child : dataSnapshot.getChildren()) {
                    fetched.add(child.getValue(owner.class));
                }
                if (!fetched.isEmpty()) {
                    Log.d("check", "data here ");
                    adapter.addData(fetched);
                }
            }

            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
                // Intentionally a no-op, as in the original implementation.
            }
        });
    }
}
| 9b75c94598172a620a760a32fb31268890b670aa | [
"Java"
] | 5 | Java | mrahmedmamdouh/E7gzlyKoraa | 1a4b2b70294b233336314ce7532d76c43e35de52 | 3bd58877dcdd0237532fc63e7cca36d13499fec5 |
refs/heads/master | <file_sep>package com.flightservices;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
@SpringBootApplication
public class App implements CommandLineRunner {
public static List<Flight> flightServices;
public static void main(String[] args) {
SpringApplication.run(App.class, args);
}
@Override
public void run(String[] args) {
// read json and write to db
ObjectMapper mapper = new ObjectMapper();
TypeReference<List<Flight>> typeReference = new TypeReference<List<Flight>>(){};
InputStream inputStream = TypeReference.class.getResourceAsStream("/flightdata.json");
try {
flightServices = mapper.readValue(inputStream,typeReference);
System.out.println(flightServices);
System.out.println("loaded flight services ::: " + flightServices.size());
}catch (IOException e){
System.out.println("Unable to load flight services data: " + e.getMessage());
}
}
}<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpParams,HttpHeaders } from '@angular/common/http';
@Injectable({
providedIn: 'root'
})
export class FlightserviceService {
constructor(private httpClient: HttpClient) { }
private headers = {headers: new HttpHeaders({
'Content-Type': 'application/json',
'Accept': 'application/json',
'Access-Control-Allow-Headers': '*',
'Access-Control-Allow-Methods': 'GET, POST, PATCH, PUT, DELETE, OPTIONS',
})};
search(flightNumber:string,origin:string,destination:string,date:string){
const params = new HttpParams();
const url = "http://localhost:8080/flights/"+
flightNumber+"/origin/"+origin+"/destination/"+destination+"/date/"+date;
console.log(url);
return this.httpClient.get<any>(url, { params });
}
}
<file_sep>import { Component,OnInit,ViewChild } from '@angular/core';
import {FlightserviceService} from '../app/flightservice.service';
import { MatTableDataSource,MatDatepickerInputEvent } from '@angular/material';
import { flight} from './flight';
import { HttpErrorResponse } from '@angular/common/http';
import { DatePipe } from '@angular/common';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.scss']
})
export class AppComponent {
title = 'my-app';
flightServicesCol: string[] = ['flightNumber', 'carrier','origin','departure','destination','arrival','distance','travelTime', 'status'];
dataSource = new MatTableDataSource();
flightData : flight[] = [];
errorMessage:string = '';
constructor(private fs:FlightserviceService,private datePipe: DatePipe){}
search(flightNumber:string,origin:string,destination:string, date:string){
console.log(flightNumber +"-"+origin+"-"+destination+"-"+date);
if(date === undefined){
return this.errorMessage = 'Date cannot be Empty - Please enter date';
}
if(flightNumber === undefined || (origin === undefined && destination === undefined)){
return this.errorMessage = 'Flight Number OR with origin AND destination is required';
}
console.log(this.datePipe.transform(date, "yyyy-MM-dd'T'HH:mm:ss"));
const updateDate = this.datePipe.transform(date, "yyyy-MM-dd'T'HH:mm:ss");
this.fs.search(flightNumber,origin,destination,updateDate).subscribe(res =>{
console.log(res);
this.flightData = res;
this.dataSource = new MatTableDataSource(this.flightData);
}, error => this.handleError(error))
}
handleError(error) {
if (error instanceof HttpErrorResponse) {
if(200 == error.status && error.name === "HttpErrorResponse"){
this.errorMessage = 'BAD REQUEST - Null/change Parameters';
}
if(400 == error.status){
this.errorMessage = 'Flight Number Cannot be empty';
}
}
}
}<file_sep># FlightServices
FlightServices
<file_sep>export class flight{
flightNumber: String ;
carrier: String ;
origin: String ;
departure: String ;
destination: String ;
arrival: String ;
aircraft: String ;
distance: number ;
travelTime: String ;
status: String ;
} | 2a0840d3b5149b3de8cf08eabe3993ebe2834554 | [
"Markdown",
"Java",
"TypeScript"
] | 5 | Java | abhavanam/FlightServices | 4b599f778f2af4455d402768a17571f1a83b9f61 | 5c6b03d099aa71cf510c9b6c23975d31bc90e37c |
refs/heads/master | <file_sep>import React, { useState } from "react";
import "./App.css";
import { useForm } from "react-hook-form";
import * as yup from 'yup'
const Schema = yup.object().shape({
username: yup.string().required().min(3),
email: yup.string().email().required(),
password: yup.string().required().min(5)
});
function App() {
const { handleSubmit, register, errors } = useForm({ validationSchema: Schema });
function onSubmit(data) {
console.log(data);
}
console.log(errors);
return (
<div>
<h3>react-hook-form</h3>
<h3>User Sign Up</h3>
<form autoComplete="off" onSubmit={handleSubmit(onSubmit)}>
<label>Username</label>
<input type="text" name="username" ref={register} />
{errors.username && <p>{errors.username.message}</p>}
<label>Email</label>
<input type="text" name="email" ref={register} />
{errors.email && <p>{errors.email.message}</p>}
<label>Password</label>
<input type="text" name="password" ref={register} />
{errors.password && <p>{errors.password.message}</p>}
<span>
<input type="checkbox" name="remember" ref={register} />
<label>Remember Me</label>
</span>
<button type="submit">Submit</button>
</form>
</div>
);
}
export default App;
| 7e8121193337392e3a327beb1c053d7d08f9c313 | [
"JavaScript"
] | 1 | JavaScript | imimran/react-hook-form | b38ce1f48c96a884d45cc96e2570b1848e39bfa5 | af390dae24b0467d8508fe36f47eca5bd43d2c1b |
refs/heads/master | <file_sep>//
// Created by fabulousfob on 12/1/18.
//
#ifndef TESTPLANNER_PROJECTS_H
#define TESTPLANNER_PROJECTS_H
class Projects {
};
#endif //TESTPLANNER_PROJECTS_H
<file_sep>cmake_minimum_required(VERSION 3.12)
project(TestPlanner)
set(CMAKE_CXX_STANDARD 14)
add_executable(TestPlanner main.cpp Projects.cpp Projects.h Paper.cpp Paper.h Exam.cpp Exam.h)<file_sep>//
// Created by fabulousfob on 12/1/18.
//
#ifndef TESTPLANNER_PAPER_H
#define TESTPLANNER_PAPER_H
class Paper {
};
#endif //TESTPLANNER_PAPER_H
<file_sep>//
// Created by fabulousfob on 12/1/18.
//
#ifndef TESTPLANNER_EXAM_H
#define TESTPLANNER_EXAM_H
class Exam {
};
#endif //TESTPLANNER_EXAM_H
<file_sep>//
// Created by fabulousfob on 12/1/18.
//
#include "Exam.h"
<file_sep>//
// Created by fabulousfob on 12/1/18.
//
#include "Projects.h"
<file_sep>//
// Created by fabulousfob on 12/1/18.
//
#include "Paper.h"
| f4a80f9ca227b5bb5d11b94c598f4f1bd78659a3 | [
"CMake",
"C++"
] | 7 | C++ | FabianMullerDahlberg/TestPlanner | cfe9f0fb73892090d56d9d83254fb5beb2ea82b4 | 23355af5bdc0413b535c7da98b2debf223e2ad14 |
refs/heads/master | <repo_name>icostin/jbot<file_sep>/jbot.h
#ifndef _JBOT_H
#define _JBOT_H
#include <zlx.h>
#include <hbs.h>
#define S(_s) ((uint8_t const *) _s)
#define E(_rv, ...) do { zlx_fprint(hbs_err, __VA_ARGS__); return (_rv); } while (0)
uint8_t ZLX_CALL elal_test
(
size_t elem_size,
uint32_t max_chain_len,
size_t alloc_count,
size_t free_count
);
/* mth_inc_test *************************************************************/
uint8_t ZLX_CALL mth_inc_test
(
size_t nt,
uint64_t ni,
uint8_t fake_mutex
);
#endif /* _JBOT_H */
<file_sep>/elal_test.c
#include "jbot.h"
static uint8_t ZLX_CALL test
(
zlx_elal_t * ZLX_RESTRICT ea,
void * * etab,
size_t alloc_count,
size_t free_count
)
{
size_t i, n;
if (!etab) E(1, "error: no mem for elal-test\n");
for (i = 0; i < alloc_count; ++i)
{
etab[i] = zlx_elal_alloc(ea, "elem");
if (!etab[i]) E(2, "error: elal alloc failed after $z elements\n", i);
}
for (i = 0; i < free_count; ++i)
{
zlx_elal_free(ea, etab[i]);
if (ea->chain_len != (i + 1 > ea->max_chain_len ? ea->max_chain_len : i + 1))
E(3, "error: unexpected chain len of $z "
"after freeing element #$z\n",
ea->chain_len, i);
}
n = ea->chain_len;
for (i = 0; i < free_count; ++i)
{
if ((i <= n && ea->chain_len != n - i)
|| (i > n && ea->chain_len != 0))
E(4, "error: unexpected chain len when reallocating item #$z\n", i);
etab[i] = zlx_elal_alloc(ea, "elem2");
if (!etab[i]) E(2, "error: elal alloc failed after $z elements\n", i);
}
if ((i <= n && ea->chain_len != n - i)
|| (i > n && ea->chain_len != 0))
E(4, "error: unexpected chain len when reallocating item #$z\n", i);
for (i = 0; i < alloc_count; ++i)
{
zlx_elal_free(ea, etab[i]);
if (ea->chain_len != (i + 1 > ea->max_chain_len ? ea->max_chain_len : i + 1))
E(3, "error: unexpected chain len of $z "
"after freeing element #$z\n",
ea->chain_len, i);
}
return 0;
}
uint8_t ZLX_CALL elal_test
(
size_t elem_size,
uint32_t max_chain_len,
size_t alloc_count,
size_t free_count
)
{
zlx_elal_t ea;
unsigned int s;
void * * etab;
if (free_count > alloc_count) free_count = alloc_count;
s = zlx_elal_init(&ea, hbs_ma, NULL, NULL, elem_size, max_chain_len);
if (s) E(1, "elal-test: failed to init elal mutex\n");
etab = hbs_alloc(sizeof(void *) * alloc_count, "etab");
s = test(&ea, etab, alloc_count, free_count);
zlx_elal_finish(&ea);
if (etab) hbs_free(etab, sizeof(void *) * alloc_count);
return s;
}
<file_sep>/mth_test.c
#include "jbot.h"
typedef struct context_s context_t;
struct context_s
{
zlx_mutex_xfc_t * mutex_xfc;
zlx_mutex_t * mutex;
uint64_t result;
uint64_t inc_count;
zlx_tid_t * tid_table;
};
/* worker *******************************************************************/
uint_fast8_t ZLX_CALL worker (void * arg)
{
context_t * c = arg;
uint64_t i;
for (i = 0; i < c->inc_count; ++i)
{
c->mutex_xfc->lock(c->mutex);
c->result++;
c->mutex_xfc->unlock(c->mutex);
}
return 0;
}
/* test *********************************************************************/
static uint8_t ZLX_CALL test
(
context_t * c,
size_t nt,
uint64_t ni,
uint8_t fake_mutex
)
{
size_t tx, itx;
zlx_mth_status_t ts;
ZLX_ARRAY_ALLOC(c->tid_table, hbs_ma, nt, "mth_inc_test.tid_table");
if (!c->tid_table) E(2, "no mem for thread table\n");
c->mutex_xfc = fake_mutex ? &zlx_nosup_mth_xfc.mutex : &hbs_mth_xfc.mutex;
c->mutex = zlx_mutex_create(hbs_ma, c->mutex_xfc, "mth_inc_test.mutex");
c->result = 0;
c->inc_count = ni;
for (tx = 0; tx < nt; ++tx)
{
ts = hbs_thread_create(c->tid_table + tx, worker, c);
if (ts)
{
HBS_LE("mth-inc-test: failed to create worker thread #$z: "
"(error code $i)\n", tx, ts);
break;
}
}
for (itx = 0; itx < tx; ++itx)
{
ts = hbs_thread_join(c->tid_table[itx], NULL);
if (ts)
{
HBS_LE("mth-inc-test: failed to join worker thread #$z: "
"(error code $i)\n", itx, ts);
}
}
ZLX_ARRAY_FREE(c->tid_table, hbs_ma, nt);
zlx_mutex_destroy(c->mutex, hbs_ma, c->mutex_xfc);
return !(c->result == ni * nt);
}
/* mth_inc_test *************************************************************/
uint8_t ZLX_CALL mth_inc_test
(
size_t nt,
uint64_t ni,
uint8_t fake_mutex
)
{
context_t c;
uint8_t r;
r = test(&c, nt, ni, fake_mutex);
zlx_fprint(hbs_out, "mth-inc-test: $z * $q -> $q $s\n",
nt, ni, c.result, r ? "FAILED" : "ok");
return r;
}
<file_sep>/GNUmakefile
projects := jbot jbotd
jbot_cfg := release checked debug
jbot_cflags := -DZLX_STATIC -DHBS_STATIC
jbot_csrc := jbot.c elal_test.c mth_test.c
jbot_ldflags = -static -lhbs$($3_sfx) -lzlx$($3_sfx) $(if $(findstring -windows,$($4_target)),-mconsole -municode,-lpthread)
jbot_ldep = $(call prod_path,zlx,slib,$3,$4) $(call prod_path,hbs,slib,$3,$4)
jbotd_cfg := release checked debug
jbotd_cflags :=
jbotd_csrc := $(jbot_csrc)
jbotd_ldflags = $(filter-out -static,$(jbot_ldflags))
jbotd_ldep = $(call prod_path,zlx,dlib,$3,$4) $(call prod_path,hbs,dlib,$3,$4)
include icobld.mk
<file_sep>/jbot.c
#include <zlx.h>
#include <hbs.h>
#include "jbot.h"
HBS_MAIN(jbot_main);
/* fchunk *******************************************************************/
uint8_t ZLX_CALL fchunk (uint8_t const * path, uint64_t ofs, uint64_t len)
{
uint8_t buffer[0x1000];
zlx_file_t * zf;
zlx_file_status_t zfs;
hbs_status_t hs;
int64_t pos;
ptrdiff_t r, w;
uint64_t t;
hs = hbs_file_open_ro(&zf, path);
if (hs) E(1, "fchunk error: failed to open \"$es\" (hs: $i)\n", path, hs);
pos = zlx_seek64(zf, ofs, ZLXF_SET);
if (pos < 0)
{
zfs = hbs_file_close(zf);
E(2, "fchunk error: seek failed (zfs: $i)\n", (int) -pos);
}
for (t = 0; t < len; )
{
size_t chunk_size = sizeof(buffer);
if (t + chunk_size > len) chunk_size = (size_t) (len - t);
r = zlx_read(zf, buffer, chunk_size);
if (r < 0)
{
hbs_file_close(zf);
E(4, "fchunk error: read error (zfs: $i)\n", (int) -r);
}
w = zlx_write(hbs_out, buffer, r);
if (w < 0)
{
hbs_file_close(zf);
E(5, "fchunk error: write error (zfs: $i)\n", (int) -w);
}
t += r;
if ((size_t) r != chunk_size)
{
hbs_file_close(zf);
E(6, "fchunk error: not enough data available (read $q=$xq)\n",
t, t);
}
}
zfs = hbs_file_close(zf);
if (zfs) E(3, "fchunk error: close error (zfs: $i)\n", zfs);
return 0;
}
/* logo *********************************************************************/
uint8_t logo ()
{
return 0 > zlx_fprint(hbs_out,
"jbot - just a bunch of tests -- ver 0.00\n");
}
/* ver **********************************************************************/
uint8_t ver ()
{
if (logo()) return 1;
if (zlx_fprint(hbs_out, "using $s\nusing $s\n",
zlx_lib_name, hbs_lib_name) < 0)
return 1;
return 0;
}
/* help *********************************************************************/
uint8_t help ()
{
if (logo()) return 1;
return 0 > zlx_fprint(hbs_out,
"usage: jbot CMD [OPTS] ARGS\n"
"commands:\n"
" help prints this\n"
" version prints the version of this tool\n"
" fchunk FILE OFS LEN outputs a chunk from a file\n"
" elal-test ES MCL AC FC tests element lookaside list allocator\n"
" mth-inc-test [-x] T I T threads each increment I times a global;\n"
" use '-x' switch to use dummy mutex locking\n"
"options:\n"
" -h --help prints this and exits\n"
" --version prints the version of this tool and exits\n"
);
}
/* run **********************************************************************/
static uint8_t ZLX_CALL run
(
unsigned int argc,
uint8_t const * const * argv,
uint8_t const * * a
)
{
unsigned int n, i, j, parse_opts = 1;
uint8_t const * cmd = NULL;
for (n = 0, i = 1; i < argc; ++i)
{
if (parse_opts && argv[i][0] == '-')
{
if (argv[i][1] == '-')
{
if (argv[i][2] == 0) { parse_opts = 0; continue; }
/* long option */
if (!zlx_u8a_zcmp(&argv[i][2], S("help"))) return help();
if (!zlx_u8a_zcmp(&argv[i][2], S("version"))) return ver();
E(124, "invoke error: unknown long option '$es'\n", argv[i]);
}
for (j = 1; argv[i][j]; ++j)
switch (argv[i][j])
{
case 'h':
return help();
}
}
if (!cmd) cmd = argv[i];
else a[n++] = argv[i];
}
if (!cmd || !zlx_u8a_zcmp(cmd, S("help"))) return help();
if (!zlx_u8a_zcmp(cmd, S("ver"))) return ver();
if (!zlx_u8a_zcmp(cmd, S("fchunk")))
{
uint64_t ofs, len;
if (n != 3) E(125, "invoke error: fchunk needs 3 args\n");
if (zlx_u64_from_str(a[1], zlx_u8a_zlen(a[1]), 0, &ofs, NULL))
E(125, "invoke error: bad number '$es'\n", a[1]);
if (zlx_u64_from_str(a[2], zlx_u8a_zlen(a[2]), 0, &len, NULL))
E(125, "invoke error: bad number '$s'\n", a[2]);
return fchunk(a[0], ofs, len);
}
else if (!zlx_u8a_zcmp(cmd, S("elal-test")))
{
uint64_t elem_size, mcl, ac, fc;
if (n != 4) E(125, "invoke error: elal-test needs 4 args\n");
if (zlx_u64_from_str(a[0], zlx_u8a_zlen(a[0]), 0, &elem_size, NULL))
E(125, "invoke error: bad number '$es'\n", a[0]);
if (zlx_u64_from_str(a[1], zlx_u8a_zlen(a[1]), 0, &mcl, NULL))
E(125, "invoke error: bad number '$es'\n", a[1]);
if (zlx_u64_from_str(a[2], zlx_u8a_zlen(a[2]), 0, &ac, NULL))
E(125, "invoke error: bad number '$es'\n", a[2]);
if (zlx_u64_from_str(a[3], zlx_u8a_zlen(a[3]), 0, &fc, NULL))
E(125, "invoke error: bad number '$es'\n", a[3]);
return elal_test((size_t) elem_size, (uint32_t) mcl,
(size_t) ac, (size_t) fc);
}
else if (!zlx_u8a_zcmp(cmd, S("mth-inc-test")))
{
uint8_t fake_mutex = 0;
uint64_t th_count, inc_count;
if (n && !zlx_u8a_zcmp(a[0], S("-x"))) { fake_mutex = 1; --n; ++a; }
if (n != 2) E(125, "invoke error: mth-inc-test needs 2 counters\n");
if (zlx_u64_from_str(a[0], zlx_u8a_zlen(a[0]), 0, &th_count, NULL))
E(125, "invoke error: bad number '$es'\n", a[0]);
if (zlx_u64_from_str(a[1], zlx_u8a_zlen(a[1]), 0, &inc_count, NULL))
E(125, "invoke error: bad number '$es'\n", a[1]);
return mth_inc_test(th_count, inc_count, fake_mutex);
}
return 0;
}
/* jbot_main ****************************************************************/
uint8_t ZLX_CALL jbot_main (unsigned int argc, uint8_t const * const * argv)
{
uint8_t r;
uint8_t const * * a;
HBS_DM("argc: $i; hbs_ma=$p", argc, hbs_ma);
ZLX_ASSERT(argc > 0);
a = hbs_alloc(argc * sizeof(uint8_t const *), "cmd args");
if (!a) E(125, "error: no mem for processing arguments\n");
r = run(argc, argv, a);
hbs_free(a, argc * sizeof(uint8_t const *));
return r;
}
<file_sep>/README.md
# jbot
just a bunch of tests
| 7aae27d62331ee493650f6a032660af357995905 | [
"Markdown",
"C",
"Makefile"
] | 6 | C | icostin/jbot | 6b5433b5f43043414267a9741e148827c4b8665d | 9958f2057da7d082fd5b4de2ecb556618e8e90aa |
refs/heads/master | <repo_name>jackding96/functional-lib<file_sep>/batch.js
function batch(func) {
let res = [];
let timer;
let test;
return function() {
function callFunc() {
test = func.apply(this, arguments);
return test;
}
timer = setTimeout(callFunc, 1000);
}
}
const func = function() {
return 'hello';
}
async function runBatch() {
const batched = batch(func);
for (let i = 0; i < 10; i++) {
const res = await batched();
console.log(res);
}
}
runBatch();<file_sep>/once.js
function once(func) {
let called, memo;
return function() {
if (called) { return memo;}
called = true;
memo = func.apply(this, arguments);
return memo;
}
}
const func = function() {
console.log('FUNC called!');
}
const onceFunc = once(func);
onceFunc();
onceFunc();
onceFunc();
onceFunc();<file_sep>/flatten.js
function flatten(arr) {
let flat = [];
arr.forEach((el) => {
if (Array.isArray(el)) {
flat = flat.concat(flatten(el));
} else { flat.push(el); }
});
return flat;
}
const arr = [1,2,[3,4,5,[6,7,[8],9],[10,11], 12, [13], 14],[15,16,[17]]];
console.log(flatten(arr)); | 31485dfcbc74c645b4b086162168e1e5f4666e64 | [
"JavaScript"
] | 3 | JavaScript | jackding96/functional-lib | 850e274de6be18c3ec574507cec8d3358e65db81 | 64a6296c1e774fe2070400a4cd963178be8e9d0c |
refs/heads/master | <repo_name>weberandphper/phpmail<file_sep>/phpmail/index.php
<?php
/** 分别封装了利用QQ和Gmai来发送邮件的方法
*
*
* 时间:2016/9/16
* 作者:anspray
*/
set_time_limit(0); //无时间限制,实际测试在apache上运行到后面会断
ignore_user_abort(); //关闭浏览器后程序依然运行
require_once './libs/class.phpmailer.php';
require_once './libs/function.php';
//QQ邮件发送
function QQmail($address,$toname,$new){
sendQQmail($address,$toname,$new);
}
//Gmail邮件发送
function gmail($address,$toname,$new){
sendGmail($address,$toname,$new);
}
function sendMail(){
$new = "hello world......"; //邮件信息
QQmail('<EMAIL>', 'user', $new); //发送人邮箱和信息 //发送邮件
}
for($i = 0;$i<=500;$i++){
sendMail();
//sendGmail();
}
?>
</body>
</html><file_sep>/phpmail/libs/function.php
<?php
/**经过测试,要是收件人不存在,若不出现错误依然返回true 也就是说在发送之前
* 自己需要些方法实现检测该邮箱是否真实有效
*
* 注意qq的密码需要自己到qq邮箱页面设置获取,并不是自己的qqmima
*
* gmail则不需要,就是自己的gmail账号密码,你也可以自己在添加新的
*
* 邮箱比如163或者是新浪,阿里等等
*
* 时间;2016/9/16
* 作者:anspray(一朵浪花)
*
*/
//采用QQ邮件服务器的发送函数
function sendQQmail($address,$toname,$new){
error_reporting(E_STRICT);
date_default_timezone_set('Asia/Shanghai'); //设置发送时区
$mail = new PHPMailer();
$mail->SMTPDebug = 1; //是否启用smtp的debug进行调试 开发环境建议开启 生产环境注释掉即可 默认关闭debug调试模式
$mail->isSMTP(); //使用smtp鉴权方式发送邮件,当然你可以选择pop方式 sendmail方式等 本文不做详解
$mail->SMTPAuth=true; //smtp需要鉴权 这个必须是true
$mail->Host = 'smtp.qq.com'; //链接qq域名邮箱的服务器地址
$mail->SMTPSecure = 'ssl'; //设置使用ssl加密方式登录鉴权
$mail->Port = 465; //设置ssl连接smtp服务器的远程服务器端口号 可选465或587
$mail->Helo = 'hello world'; //设置smtp的helo消息头 这个可有可无 内容任意
$mail->Hostname = 'hello world'; //设置发件人的主机域 可有可无 默认为localhost 内容任意,建议使用你的域名
$mail->CharSet = 'UTF-8'; //设置发送的邮件的编码 可选GB2312 我喜欢utf-8 据说utf8在某些客户端收信下会乱码
$mail->FromName = 'this is my test'; //设置发件人姓名(昵称) 任意内容,显示在收件人邮件的发件人邮箱地址前的发件人姓名
$mail->Username ='<EMAIL>'; //smtp登录的账号 这里填入字符串格式的qq号即可
$mail->Password = '<PASSWORD>'; //smtp登录的密码 这里填入“独立密码” 若为设置“独立密码”则填入登录qq的密码 建议设置“独立密码”
$mail->From = '<EMAIL>'; //设置发件人邮箱地址 这里填入上述提到的“发件人邮箱”
$mail->isHTML(true); //邮件正文是否为html编码 注意此处是一个方法 不再是属性 true或false
$mail->addAddress($address,$toname); //设置收件人邮箱地址 该方法有两个参数 第一个参数为收件人邮箱地址 第二参数为给该地址设置的昵称 不同的邮箱系统会自动进行处理变动 这里第二个参数的意义不大
$mail->Subject = 'hello world'; //添加该邮件的主题
$mail->Body = "$new"; //添加邮件正文 上方将isHTML设置成了true,则可以是完整的html字符串 如:使用file_get_contents函数读取本地的html文件
// $mail->addAttachment('./Jlib-1.1.0.js','Jlib.js');同样该方法可以多次调用 上传多个附件
$status = $mail->send();
if($status) {
echo 'Email sending'; //简单的判断与提示信息
}else{
echo 'Email sending failure:'.$mail->ErrorInfo;
}
}
//发送gmail的邮件函数
function sendGmail($address,$toname,$new){
error_reporting(E_STRICT);
date_default_timezone_set('Asia/Shanghai'); //设置发送时区
header("Content-Type :text/html; charset=utf-8"); //必须设置邮件编码,不然无法保证主题,标题乱码
$mail = new PHPMailer();
$mail->Charset = "UTF-8";
$mail->IsSMTP(); // telling the class to use SMTP
$mail->Host = "html5"; // SMTP server
$mail->SMTPDebug = 2; // enables SMTP debug information (for testing)
// 1 = errors and messages
// 2 = messages only
$mail->SMTPAuth = true; // enable SMTP authentication
$mail->SMTPSecure = "ssl"; // sets the prefix to the servier
$mail->Host = "smtp.gmail.com"; // sets GMAIL as the SMTP server
$mail->Port = 465; // set the SMTP port for the GMAIL server
$mail->Username = "***<EMAIL>"; // GMAIL username
$mail->Password = "<PASSWORD>*******"; // GMAIL password
$mail->SetFrom('<EMAIL>', '....');
$mail->AddReplyTo("<EMAIL>","...");
$mail->Subject = "rubbish";
$mail->AltBody = "To view the message, please use an HTML compatible email viewer!"; // optional, comment out and test
$mail->MsgHTML($new); //要发送的信息内容
// $address = $toname;
$mail->AddAddress($address, $toname);
// $mail->AddAttachment("images/phpmailer.gif"); // attachment
// $mail->AddAttachment("images/phpmailer_mini.gif"); // attachment
if(!$mail->Send()) {
echo "邮件发送失败: " . $mail->ErrorInfo;
} else {
echo "邮件发送成功!";
}
}
?> | 52c03a3616d2e9f378231a798492ed51671d68fd | [
"PHP"
] | 2 | PHP | weberandphper/phpmail | bb94879460ef8360bbfdd0174b5791b49c8529b7 | 81c297c4947591e60907f3aa15ea7d1a18356a53 |
refs/heads/master | <repo_name>yeskunall/webhooks-test<file_sep>/index.js
const app = require('express')();
const bodyParser = require('body-parser');
require('dotenv').config();
const port = 3000;
app.use(bodyParser.json());
app.post('/payload', (req, res, next) => {
console.log('Payload received:', req.body);
});
app.listen(port, _ => console.log(`Listening on port ${port}`));
| 6c2a971d975a42f1fc7b4c17a8fa67a6c3c2a260 | [
"JavaScript"
] | 1 | JavaScript | yeskunall/webhooks-test | 967ca166471a534a8942d2cb4e301600ff04c900 | 69a6e598a69583bd81ff710467d6f2620688a38f |
refs/heads/master | <file_sep>var RaceAverage = function(){
this.day_one = '07/08/2016'
this.start_dt = ['08:00 AM, DAY 1']
this.convertHour = function(hour, ampm){
console.log('converting', hour , ampm)
var hr = parseInt(hour)
if( hr >= 01 && hr <= 11 && ampm === 'PM'){
return Math.floor(hr + 12)
}else if (hr === 12 && ampm === 'PM'){
return hr
}else{
return Math.floor(hr - 12)
}
}
this.getMinDiff = function(start_dt , finish_dt){
console.log('start_dt', start_dt)
console.log('finish_dt', finish_dt)
var diff = (Math.abs(finish_dt.getTime() - start_dt.getTime()))
var minutes = Math.floor((diff/1000)/60)
console.log('minutes', minutes)
return minutes
},
this.avgMinutes = function(race_times){
try {
if(race_times.length <=0){
throw 'ERROR: empty array'
}
console.log('race start date & time', this.start_dt)
console.log('race finsih date & time', race_times)
var total_boat_times = 0
var d_one = this.day_one.split('/')
var st_dt = this.start_dt[0].split(' ')
var year = d_one[2]
var month = parseInt(d_one[0])-1
var new_st_date = new Date(year, month, d_one[1], st_dt[0].split(':')[0], st_dt[0].split(':')[1])
//start loop
for (var i = 0; i < race_times.length; i++ ){
var fn_dt = race_times[i].split(' ')
var day = (fn_dt[3] === '1')?d_one[1]:parseInt(d_one[1])+parseInt(fn_dt[3]-1)
var hour = this.convertHour(fn_dt[0].split(':')[0], fn_dt[1].replace(',', ''))
var mins = fn_dt[0].split(':')[1]
var new_fin_date = new Date(year, month, day, hour, mins )
total_boat_times += this.getMinDiff(new_st_date, new_fin_date);
}
var race_avg = Math.round(total_boat_times / race_times.length)
console.log('race average', race_avg)
return race_avg
}catch(err){
console.log(err)
return false
}
}
}<file_sep>var TxBlkg = {
createMultiArray:function(lines){
//first split string into array
var split_lines = lines.map(function(cur, idx){
return cur.split('')
});
///second push first item into new array
var ltrs = []
var final_lines = []
for (var i = 0; i < split_lines.length; i ++){
ltrs[i] = split_lines[i][0]
final_lines.push(ltrs)
};
//third join new array items into string
var output_lines = final_lines.map(function(cur, idx){
var join_array = cur.join().replace(/,/g, '')
return join_array
});
return output_lines
},
createNewArray:function(lines){
return lines[0].split('')
},
createSingleArray:function(lines){
return [lines.join().replace(/,/g, '')]
},
stackBlocks:function(pros_of_passage){
try{
var col_lines;
var lines = pros_of_passage
var len = lines.length
console.log('prose input: ', lines);
//case 1: array with len > 1 with string len > 1
//case 2: array with len <= 1 with string len > 1
//case 3: array with len <= 1 with string len <=1
//case 4: array with len <= 0
if (len > 1 && lines[0].length > 1){
col_lines = this.createMultiArray(lines)
}else if (len <= 1 && lines[0].length > 1){
col_lines = this.createNewArray(lines)
}else if (len > 1 && lines[0].length <=1){
col_lines = this.createSingleArray(lines)
}else if(len <= 0){
col_lines = []
}else{
throw 'ERROR:please stick to the prose of passage'
}
console.log('prose output: ', col_lines)
return col_lines;
}catch(err){
console.log('ERROR:please stick to the prose of passage')
return false
}
}
}
| eac52ee5672063eabb70e40f57c5a2cc36e6586e | [
"JavaScript"
] | 2 | JavaScript | murilloric/sh_coding_challenge | 9ae1c25809841ccbe6b0c459c863598ce25097ca | 9cfe167d93aea3c974743b93d494e9f2edae5fbd |
refs/heads/master | <repo_name>senzil/angular-datetime-range<file_sep>/README.md
# Datetime range and Time range input UI element
This directive is designed to provide easy and intuitive input of _**moment-range.js**_ datetime range objects.
Typically this can be used to represent a moment range with start and end datetime objects.
Designed to be as simple as possible to afford intuitive interactions, including scrolling.
Converted into an Angular directive for your convenience :)
This work was based on the [angular-datetime-range](https://www.npmjs.com/package/angular-datetime-range) directive, but forked to use a [moment-range.js](https://github.com/rotaready/moment-range) range object as the ng-model.
## Demo
Click <a href="https://rawgit.com/senzil/angular-datetime-range/master/" target="_blank">here</a> for a live demo.
## Installation
##### 1) Install '@senzil/angular-datetime-range'
```
npm install @senzil/angular-datetime-range
yarn add @senzil/angular-datetime-range
https://cdn.rawgit.com/senzil/angular-datetime-range/v1.0.0/dist/datetime-range.min.js
https://cdn.rawgit.com/senzil/angular-datetime-range/v1.0.0/dist/datetime-range.min.css
```
##### 2) Prerequisites
You must set up these dependencies:
1. angular.js
1. moment.js
1. moment-range.js
1. moment-timezone.js
###### NPM or YARN
If you use npm or yarn, the dependencies will be downloaded with the directive.
###### In Browser
In the browser you must add the scripts:
```html
<script src="https://cdnjs.cloudflare.com/ajax/libs/angular.js/1.6.5/angular.min.js" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.18.1/moment-with-locales.min.js" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment-timezone/0.5.13/moment-timezone-with-data.min.js" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment-range/3.0.3/moment-range.js" charset="utf-8"></script>
```
##### 3) Add the 'senzil.datetime-range' module to your app config
```javascript
angular.module('myApp', ['senzil.datetime-range'])
```
##### 4) Use the directives in a view
```html
<datetime-range ng-model="myDatetimeRange"></datetime-range>
```
[<img src="https://raw.githubusercontent.com/senzil/angular-datetime-range/master/docs/datetime-range.png" alt="Angular directive datetime range" width="300px">](https://rawgit.com/senzil/angular-datetime-range/master/)
```html
<time-range ng-model="myTimeRange"></time-range>
```
[<img src="https://raw.githubusercontent.com/senzil/angular-datetime-range/master/docs/time-range.png" alt="Angular directive time range" width="300px">](https://rawgit.com/senzil/angular-datetime-range/master/)
### Attributes
|Property | Usage | Default | Required |
|:------------- |:-------------|:-----:|:-----:|
| ng-model | DateRange (moment-range.js) object to bind from controller | none | yes |
| limits-range | DateRange (moment-range.js) object with bounds limits for the component| moment.range(null, null) - all time | no |
| timezone | Timezone string name (only datetime-range)| moment.tz.guess() | no |
| includeSeconds | Boolean - Show seconds in directive to set them | false | no |
| showClose | Boolean - Show close button in directive | false | no |
| on-change | Handler function that is fired on change of range object | none | no |
| on-change-start | Handler function that is fired on change of range.start moment object | none | no |
| on-change-end | Handler function that is fired on change of range.end moment object | none | no |
| on-close | Handler function that is fired on close of edit popover | none | no |
## Dependencies
* [AngularJS](https://angularjs.org/)
* [moment.js](http://momentjs.com/)
* [moment-timezone.js](https://momentjs.com/timezone/)
* [moment-range.js](https://github.com/gf3/moment-range)<file_sep>/gulpfile.babel.js
import gulp from 'gulp';
import del from 'del';
import gulpLoadPlugins from 'gulp-load-plugins';
import webpack from 'webpack-stream';
import runSequence from 'run-sequence';
// Lazy-load every gulp-* devDependency onto a single `plugins` object.
let plugins = gulpLoadPlugins();
// Production build: clean dist, transpile JS and minify CSS in parallel,
// bundle the transpiled output with webpack, then drop the .tmp staging dir.
gulp.task('build', cb => runSequence('clean',['build:babel', 'build:css'], 'build:webpack', 'clean:tmp', cb));
// Production webpack pass: wrap the transpiled sources from .tmp into one
// minified UMD bundle. Framework libraries stay external so consumers
// provide their own angular/moment copies.
gulp.task('build:webpack', () => gulp.src('.tmp/*.js')
    .pipe(plugins.plumber())
    .pipe(webpack({
        output: {
            filename: 'datetime-range.min.js',
            libraryTarget: 'umd'
        },
        externals: {
            "angular": "angular",
            "moment": "moment",
            "moment-range": "moment-range",
            "moment-timezone": "moment-timezone"
        }
    }))
    .pipe(plugins.uglify())
    .pipe(gulp.dest('dist'))
    .on('error', function (err) { console.error(err); }));
// Development webpack pass: same bundle as build:webpack but unminified
// (no uglify) and written as datetime-range.js for easier debugging.
gulp.task('build:webpack:dev', () => gulp.src('.tmp/*.js')
    .pipe(plugins.plumber())
    .pipe(webpack({
        output: {
            filename: 'datetime-range.js',
            libraryTarget: 'umd'
        },
        externals: {
            "angular": "angular",
            "moment": "moment",
            "moment-range": "moment-range",
            "moment-timezone": "moment-timezone"
        }
    }))
    .pipe(gulp.dest('dist'))
    .on('error', function (err) { console.error(err); }));
// Transpile ES6 sources, inline the Angular HTML templates into the JS,
// minify, and stage the result in .tmp for the webpack tasks.
gulp.task('build:babel', () => gulp.src('src/*.js')
    .pipe(plugins.plumber())
    .pipe(plugins.babel())
    .pipe(plugins.angularEmbedTemplates({logger:console.log, debug:true}))
    .pipe(plugins.uglify())
    .pipe(gulp.dest('.tmp'))
    .on('error', function (err) { console.error(err); }));
// Minify and concatenate the component stylesheets into dist/.
// Fix: the error handler was chained onto the return value of gulp.task()
// (the closing paren ended the task registration one call too early), so
// stream errors were attached to `undefined` at load time. The handler now
// attaches to the stream itself, matching every other task in this file.
gulp.task('build:css', () => gulp.src('src/*.css')
    .pipe(plugins.plumber())
    .pipe(plugins.cssnano())
    .pipe(plugins.concat('datetime-range.min.css'))
    .pipe(gulp.dest('dist'))
    .on('error', function (err) { console.error(err); }));
// Development asset pass: copy CSS straight to dist (unminified) and stage
// transpiled, template-embedded JS in .tmp. Only the JS stream is returned,
// so task completion tracks the JS pipeline; the CSS copy is fire-and-forget.
gulp.task('build:dev', () => {
    gulp.src('src/*.css')
        .pipe(plugins.plumber())
        .pipe(gulp.dest('dist'));
    return gulp.src('src/*.js')
        .pipe(plugins.plumber())
        .pipe(plugins.babel())
        .pipe(plugins.angularEmbedTemplates({logger:console.log, debug:true}))
        .pipe(gulp.dest('.tmp'))
        .on('error', function (err) { console.error(err); });
});
// Housekeeping and watch tasks.
gulp.task('clean', () => del('dist'));
gulp.task('clean:tmp', () => del('.tmp'));
// Full development build sequence (unminified output).
gulp.task('dev:build', cb => runSequence('clean','build:dev', 'build:webpack:dev', 'clean:tmp', cb));
// `gulp dev`: build once, then rebuild on every source change.
gulp.task('dev', cb => runSequence('dev:build', 'watch', cb));
gulp.task('watch', function() {
    gulp.watch(['src/**/*.+(js|html|css)', 'index.*'], ['dev:build']);
});
<file_sep>/index.js
module.exports = require('./dist/datetime-range.min.js').default;
| 92d2f71f8e1ecae2135ee031175deed0f0633171 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | senzil/angular-datetime-range | 88c0e2d59a56f41dbd7db37706d3d24971f2ef77 | da91e35ff24e4779c98bfcc6d14b3949c9db95b4 |
refs/heads/master | <repo_name>Phoud/podcast<file_sep>/app/Media.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for a media item (podcast episode or video).
 *
 * Exposes the relations used throughout the admin area plus two helpers
 * that serialise the attached tag / guest ids for the edit forms.
 */
class Media extends Model
{
    // Explicit table name: the default pluralisation would not match "media".
    protected $table = 'media';

    /** Download records pointing at this media item. */
    public function download(){
        return $this->hasMany('App\Download', 'post_id');
    }

    /** Likes received by this media item. */
    public function like(){
        return $this->hasMany('App\Like', 'post_id');
    }

    /** Category this item is filed under. */
    public function category()
    {
        return $this->belongsTo('App\category');
    }

    /** Author of the item. */
    public function User()
    {
        return $this->belongsTo('App\User');
    }

    /** Tag pivot rows (same target as tags(); kept for legacy callers). */
    public function media_tag()
    {
        return $this->hasMany('App\PostTag', 'post_id');
    }

    /** Tag pivot rows for this item. */
    public function tags(){
        return $this->hasMany(PostTag::class, 'post_id');
    }

    /** Guest pivot rows for this item. */
    public function guests(){
        return $this->hasMany(GuestPost::class, 'post_id');
    }

    /** JSON array of attached tag ids, used to pre-select edit-form options. */
    public function selectedTag(){
        $ids = [];
        foreach ($this->tags as $pivot) {
            $ids[] = $pivot->tag_id;
        }
        return json_encode($ids);
    }

    /** JSON array of attached guest ids, used to pre-select edit-form options. */
    public function selectedGuest(){
        $ids = [];
        foreach ($this->guests as $pivot) {
            $ids[] = $pivot->guest_id;
        }
        return json_encode($ids);
    }

    /** Comments left on this item. */
    public function comments(){
        return $this->hasMany('App\Comment', 'post_id');
    }
}
<file_sep>/app/GuestPost.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Pivot model linking a media post to a guest.
 *
 * The `type` column records which kind of post ('podcast' / 'video')
 * the link belongs to.
 */
class GuestPost extends Model
{
    // Columns that may be mass-assigned when attaching guests to a post.
    protected $fillable = ['post_id', 'guest_id', 'type'];

    /** The media post side of the pivot. */
    public function Media()
    {
        return $this->belongsTo('App\Media');
    }

    /** The guest side of the pivot. */
    public function Guest()
    {
        return $this->belongsTo('App\Guest');
    }
}
<file_sep>/app/Http/Controllers/AdminController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\category, App\blog, App\magazine;
use App\webimage, App\formcontact, App\contact, App\video;
use App\tag;
use App\Media;
use App\Guest, App\PostTag, App\GuestPost;
use Auth;
use App\User;
use Illuminate\Support\Facades\Hash;
/**
 * Back-office controller: dashboard plus CRUD for every content type
 * (categories, news, magazines, media, tags, guests, users, contacts).
 */
class AdminController extends Controller
{
    public function __construct()
    {
        // Only super_admin / admin accounts may reach any action here.
        $this->middleware('usermiddleware:super_admin,admin');
    }

    /** Dashboard: full lists of every content type for the overview page. */
    public function index()
    {
        $medias = Media::all();
        $mags = magazine::all();
        $users = User::where('Usertype', 'member')->get();
        $news = blog::all();
        $contacts = formcontact::all();
        return view('admin.pages.index', compact('medias', 'mags', 'users', 'news', 'contacts'));
    }

    /** Category ("type") management page, newest first. */
    public function types()
    {
        $category = category::orderBy('id', 'desc')->get();
        return view('admin.pages.type')->with('categorys', $category);
    }

    /** News/blog listing page, newest first. */
    public function admin_blogs()
    {
        $blogs = blog::orderBy('id', 'desc')->get();
        return view('admin.pages.blog.blog', compact('blogs'));
    }

    /** Magazine listing page, newest first. */
    public function admin_magazines()
    {
        $show_magazine = magazine::orderBy('id', 'desc')->get();
        return view('admin.pages.magazine.magazine')->with('show_magazines', $show_magazine);
    }

    /** "Add magazine" form. */
    public function getAddMagazine(){
        return view('admin.pages.magazine.add');
    }

    /** Site-logo management page (single row table). */
    public function admin_logo_websites()
    {
        $show_webimage = webimage::first();
        return view('admin.pages.logo_web')->with('show_webimages', $show_webimage);
    }

    /** Contact-form submissions, newest first. */
    public function admin_form_contacts()
    {
        $show_formcontact = formcontact::orderBy('id', 'desc')->get();
        return view('admin.pages.form_contact')->with('show_formcontacts', $show_formcontact);
    }

    /** Site contact details page (single row table). */
    public function admin_contacts()
    {
        $show_contact = contact::first();
        return view('admin.pages.contact')->with('show_contacts', $show_contact);
    }

    # Insert Data Types
    /** Create a category from the submitted name (no validation). */
    public function insert_types(Request $request)
    {
        $insert_type = new category;
        $insert_type->name = $request->typename;
        $insert_type->save();
        return back();
    }

    /** Rename an existing category. */
    public function edit_typess(Request $request, $id)
    {
        $edit_type = category::findOrfail($id);
        $edit_type->name = $request->typename;
        $edit_type->save();
        return back();
    }

    /** Delete a category. */
    public function delete_types($id)
    {
        $delete_type = category::findOrfail($id);
        $delete_type->delete();
        return back();
    }
    /** "Add news" form; existing tags supplied for the tag selector. */
    public function getAddNews(){
        $tags = tag::all();
        return view('admin.pages.blog.add', compact('tags'));
    }

    # Insert Data Blog
    /**
     * Create a news post with a cover image and tags.
     *
     * Submitted tag values may be existing ids (numeric) or new tag names
     * (strings); new names are inserted first, then every id is attached
     * as a PostTag row.
     *
     * NOTE(review): tags are looked up / inserted via a `name` column here,
     * while the tag CRUD below writes `tag_name` — confirm which column the
     * tags table actually has; one of the two paths is likely broken.
     */
    public function insert_blogs(Request $request)
    {
        $this->validate($request, [
            'title' => 'required',
            'image' => 'required|image',
            'body' => 'required'
        ]);
        if ($request->hasFile('image')) {
            // Random, collision-resistant file name for the uploaded cover.
            $file = $request->file('image');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/blog/', $name);
            $save = new blog;
            $save->title = $request->title;
            $save->body = $request->body;
            $save->image = $name;
            $save->save();
            $tags = $request->tags;
            if(isset($tags) && is_array($tags) && count($tags) > 0){
                $oldtags = [];
                $newtags = [];
                // Split submitted values: numeric = existing tag id, else a new name.
                foreach($tags as $key => $tag){
                    if(is_numeric($tag)){
                        $oldtags[] = $tag;
                    }else{
                        $newtags[] = $tag;
                    }
                }
                #insert new tag
                if(count( $newtags ) > 0){
                    foreach($newtags as $newtag){
                        $dbtag = tag::where('name', $newtag)->first();
                        if(!isset($dbtag)){
                            $oldtags[] = tag::insertGetId([
                                'name'=> $newtag,
                                'created_at' => \Carbon\Carbon::now(),
                                'updated_at' => \Carbon\Carbon::now()
                            ]);
                        }
                    }
                }
                //save all user input tag
                if(count( $oldtags ) > 0){
                    foreach($oldtags as $oldtag){
                        $save->tags()->save(new PostTag([
                            'tag_id' => $oldtag
                        ]));
                    }
                }
            }
            return redirect()->route('admin.blog');
        }
    }

    /** "Edit news" form with the post and all tags for the selector. */
    public function getUpdateNews($id){
        $updates = blog::findOrfail($id);
        $tags = tag::all();
        return view('admin.pages.blog.update', compact('updates', 'tags'));
    }

    /**
     * Update a news post. The cover image is replaced only when a new file
     * is uploaded. Tag links are wiped and re-created from the submission
     * using the same numeric-id / new-name convention as insert_blogs().
     */
    public function edit_blogs(Request $request, $id)
    {
        $this->validate($request, [
            'title' => 'required',
            'body' => 'required'
        ]);
        $save = blog::findOrfail($id);
        if ($request->hasFile('image')) {
            $file = $request->file('image');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/blog/', $name);
            $save->title = $request->title;
            $save->body = $request->body;
            $save->image = $name;
            $save->save();
        }else{
            $save->title = $request->title;
            $save->body = $request->body;
            $save->save();
        }
        // Replace the whole tag set with the submitted one.
        $save->tags()->delete();
        $tags = $request->tags;
        if(isset($tags) && is_array($tags) && count($tags) > 0){
            $oldtags = [];
            $newtags = [];
            foreach($tags as $key => $tag){
                if(is_numeric($tag)){
                    $oldtags[] = $tag;
                }else{
                    $newtags[] = $tag;
                }
            }
            #insert new tag
            if(count( $newtags ) > 0){
                foreach($newtags as $newtag){
                    $dbtag = tag::where('name', $newtag)->first();
                    if(!isset($dbtag)){
                        $oldtags[] = tag::insertGetId([
                            'name'=> $newtag,
                            'created_at' => \Carbon\Carbon::now(),
                            'updated_at' => \Carbon\Carbon::now()
                        ]);
                    }
                }
            }
            //save all user input tag
            if(count( $oldtags ) > 0){
                foreach($oldtags as $oldtag){
                    $save->tags()->save(new PostTag([
                        'tag_id' => $oldtag
                    ]));
                }
            }
        }
        return redirect()->route('admin.blog');
    }

    /** Delete a news post (its tag pivot rows are not cleaned up here). */
    public function delete_blogs($id)
    {
        $delete_blog = blog::findOrfail($id);
        $delete_blog->delete();
        return back();
    }
    /**
     * Create a magazine issue: stores the magazine file and cover image
     * under public/admins/magazine and saves the metadata row.
     * Both uploads are required by validation, so the hasFile guard holds.
     */
    public function insert_magazines(Request $request)
    {
        $this->validate($request, [
            'date_of_publish' => 'required',
            'issue' => 'required',
            'cover' => 'required|image',
            'magazine' => 'required',
            'description' => 'required'
        ]);
        if ($request->hasFile('magazine') && $request->hasFile('cover')) {
            $file = $request->file('magazine');
            $name = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/magazine/', $name);
            $cover = $request->file('cover');
            $cover_name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $cover->getClientOriginalExtension();
            $cover->move(public_path() . '/admins/magazine', $cover_name);
            $insert_mag = new magazine;
            $insert_mag->magazines = $name;
            $insert_mag->cover = $cover_name;
            $insert_mag->date_of_publish = $request->date_of_publish;
            $insert_mag->issue = $request->issue;
            $insert_mag->description = $request->description;
            $insert_mag->save();
            return redirect()->route('admin.magazine');
        }
    }

    /** "Edit magazine" form for one issue. */
    public function getUpdateMagazine($id){
        $update = magazine::findOrfail($id);
        return view('admin.pages.magazine.update', compact('update'));
    }
public function edit_magazines(Request $request, $id)
{
$this->validate($request, [
'date_of_publish' => 'required',
'issue' => 'required',
'description' => 'required'
]);
if ($request->hasFile('magazine') && $request->hasFile('cover')) {
$file = $request->file('magazine');
$name = time() . $file->getClientOriginalName();
$file->move(public_path() . '/admins/magazine/', $name);
$cover = $request->file('cover');
$cover_name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $cover->getClientOriginalExtension();
$cover->move(public_path() . '/admins/magazine', $cover_name);
$update = new magazine;
$update->magazines = $name;
$update->cover = $cover_name;
$update->date_of_publish = $request->date_of_publish;
$update->issue = $request->issue;
$update->description = $request->description;
$update->save();
return redirect()->route('admin.magazine');
}else if($request->hasFile('magazine')){
$file = $request->file('magazine');
$name = time() . $file->getClientOriginalName();
$file->move(public_path() . '/admins/magazine/', $name);
$update = magazine::findOrfail($id);
$update->magazines = $name;
$update->date_of_publish = $request->date_of_publish;
$update->issue = $request->issue;
$update->description = $request->description;
$update->save();
return redirect()->route('admin.magazine');
}else if($request->hasFile('cover')){
$cover = $request->file('cover');
$cover_name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $cover->getClientOriginalExtension();
$cover->move(public_path() . '/admins/magazine', $cover_name);
$update = magazine::findOrfail($id);
$update->cover = $cover_name;
$update->date_of_publish = $request->date_of_publish;
$update->issue = $request->issue;
$update->description = $request->description;
$update->save();
return redirect()->route('admin.magazine');
}else{
$update = magazine::findOrfail($id);
$update->date_of_publish = $request->date_of_publish;
$update->issue = $request->issue;
$update->description = $request->description;
$update->save();
return redirect()->route('admin.magazine');
}
}
    /** Delete a magazine issue (stored files are left on disk). */
    public function delete_magazines($id)
    {
        $delete_mag = magazine::findOrfail($id);
        $delete_mag->delete();
        return back();
    }

    /** Store a new site logo row from the uploaded image. */
    public function insert_web_logos(Request $request)
    {
        $this->validate($request, [
            'logo' => 'required|image'
        ]);
        $insert_weblog = new webimage;
        // Insert Logo User
        if ($request->hasfile('logo')) {
            $file = $request->file('logo');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/weblogo/', $name);
            $insert_weblog->logo = $name;
            $insert_weblog->save();
            return back();
        }
    }

    /** Replace the logo image on an existing webimage row. */
    public function edit_web_logos(Request $request, $id)
    {
        $this->validate($request, [
            'logo' => 'required|image'
        ]);
        $insert_weblog = webimage::find($id);
        // Insert Logo User
        if ($request->hasfile('logo')) {
            $file = $request->file('logo');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/weblogo/', $name);
            $insert_weblog->logo = $name;
            $insert_weblog->save();
            return back();
        }
    }

    /** Store a contact-form submission (no validation). */
    public function insert_formcontacts(Request $request)
    {
        $insert_form_contact = new formcontact;
        $insert_form_contact->name = $request->name;
        $insert_form_contact->email = $request->email;
        $insert_form_contact->message = $request->messages;
        $insert_form_contact->save();
        return back();
    }

    /** Edit a stored contact-form submission. */
    public function edit_formcontacts(Request $request, $id)
    {
        $edit_form_contact = formcontact::findOrfail($id);
        $edit_form_contact->name = $request->name;
        $edit_form_contact->email = $request->email;
        $edit_form_contact->message = $request->messages;
        $edit_form_contact->save();
        return back();
    }

    /** Delete a contact-form submission. */
    public function delete_formcontacts($id)
    {
        $delete_form_contact = formcontact::findOrfail($id);
        $delete_form_contact->delete();
        return back();
    }

    /** Store the site contact details (tel / email / address). */
    public function insert_contacts(Request $request)
    {
        $insert_contact = new contact;
        $insert_contact->tel = $request->tel;
        $insert_contact->email = $request->email;
        $insert_contact->address = $request->address;
        $insert_contact->save();
        return back();
    }

    /** Update the site contact details. */
    public function edit_contacts(Request $request, $id)
    {
        $edit_contact = contact::findOrfail($id);
        $edit_contact->tel = $request->tel;
        $edit_contact->email = $request->email;
        $edit_contact->address = $request->address;
        $edit_contact->save();
        return back();
    }

    # CRUD TAG
    /** Tag management page, newest first. */
    public function show_tags()
    {
        $show_tag = tag::orderBy('id', 'desc')->get();
        return view('admin.pages.tag')->with('show_tags', $show_tag);
    }

    /**
     * Create a tag with a localised (Lao) validation message.
     * NOTE(review): writes the `tag_name` column, while the media/blog tag
     * sync above uses `name` — confirm the actual schema.
     */
    public function insert_tags(Request $request)
    {
        $rules = [
            't_name' => 'required',
        ];
        $inserttag = [
            't_name.required' => 'ກະລຸນາປ້ອນເເທ໋ກກ່ອນ',
        ];
        $sendrequest = $this->validate($request, $rules, $inserttag);
        $insert_tag = new tag;
        $insert_tag->tag_name = $request->t_name;
        $insert_tag->save();
        return back();
    }

    /** Rename an existing tag (same localised validation as insert_tags). */
    public function edit_tags(Request $request, $id)
    {
        $rules = [
            't_name' => 'required',
        ];
        $inserttag = [
            't_name.required' => 'ກະລຸນາປ້ອນເເທ໋ກກ່ອນ',
        ];
        $sendrequest = $this->validate($request, $rules, $inserttag);
        $edit_tag = tag::findOrfail($id);
        $edit_tag->tag_name = $request->t_name;
        $edit_tag->save();
        return back();
    }

    /** Delete a tag. */
    public function delete_tags($id)
    {
        $del_tag = tag::findOrfail($id);
        $del_tag->delete();
        return back();
    }
    # CRUD VIDEOS
    /** Video listing page (Media rows with mediaType 'video'). */
    public function admin_videos()
    {
        $videos = Media::where('mediaType', 'video')->get();
        return view('admin.pages.videos.videos')->with('videos', $videos);
    }

    /** "Add video" form with tags, categories and guests for the selectors. */
    public function getAddVideo()
    {
        $tags = tag::all();
        $cates = category::all();
        $guests = Guest::all();
        return view('admin.pages.videos.add', compact('tags','cates','guests'));
    }

    /**
     * Create a video Media row with thumbnail + video file uploads, then
     * attach tags (numeric id or new name, as in insert_blogs) and guests,
     * both pivot rows flagged with type 'video'.
     */
    public function admin_insert_videos(Request $request)
    {
        $this->validate($request, [
            'title' => 'required',
            'image' => 'required|image',
            'video' => 'required',
            'description' => 'required',
            'category' => 'required',
            'guests' => 'required'
        ]);
        $save = new Media;
        // Insert Photos
        if ($request->hasFile('image')) {
            $file = $request->file('image');
            $names = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $names);
            $save->image = $names;
        }
        // Insert Podcast
        if ($request->hasFile('video')) {
            $file = $request->file('video');
            $name = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $name);
            $save->media = $name;
        }
        $save->title = $request->title;
        $save->description = $request->description;
        $save->mediaType = "video";
        $save->cate_id = $request->category;
        $save->save();
        // Insert New Tag and Old tag
        $tags = $request->tags;
        if(isset($tags) && is_array($tags) && count($tags) > 0){
            $oldtags = [];
            $newtags = [];
            foreach($tags as $key => $tag){
                if(is_numeric($tag)){
                    $oldtags[] = $tag;
                }else{
                    $newtags[] = $tag;
                }
            }
            #insert new tag
            if(count( $newtags ) > 0){
                foreach($newtags as $newtag){
                    $dbtag = tag::where('name', $newtag)->first();
                    if(!isset($dbtag)){
                        $oldtags[] = tag::insertGetId([
                            'name'=> $newtag,
                            'created_at' => \Carbon\Carbon::now(),
                            'updated_at' => \Carbon\Carbon::now()
                        ]);
                    }
                }
            }
            //save all user input tag
            if(count( $oldtags ) > 0){
                foreach($oldtags as $oldtag){
                    $save->tags()->save(new PostTag([
                        'tag_id' => $oldtag,
                        'type' => 'video'
                    ]));
                }
            }
        }
        // Guest save
        $guests = $request->guests;
        if(count($guests) > 0){
            $oldguests = [];
            // Only numeric guest ids are accepted; free-text guests are ignored.
            foreach($guests as $key => $guest){
                if(is_numeric($guest)){
                    $oldguests[] = $guest;
                }
            }
            //save all user input tag
            if(count( $oldguests ) > 0){
                foreach($oldguests as $oldguest){
                    $save->guests()->save(new GuestPost([
                        'guest_id' => $oldguest,
                        'type' => 'video'
                    ]));
                }
            }
        }
        return redirect()->route('admin.videos');
    }

    /** "Edit video" form with the row and selector data. */
    public function getUpdateVideo($id)
    {
        $update = Media::findOrfail($id);
        $tags = tag::all();
        $cates = category::all();
        $guests = Guest::all();
        return view('admin.pages.videos.update', compact('update','tags','cates','guests'));
    }

    /**
     * Update a video: files replaced only when re-uploaded; tag and guest
     * pivot rows are wiped and re-created from the submission.
     */
    public function updateVideo(Request $request, $id)
    {
        $this->validate($request, [
            'title' => 'required',
            'description' => 'required',
            'category' => 'required',
            'guests' => 'required'
        ]);
        $update = Media::findOrfail($id);
        // Insert Photos
        if ($request->hasFile('image')) {
            $file = $request->file('image');
            $names = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $names);
            $update->image = $names;
        }
        // Insert Podcast
        if ($request->hasFile('video')) {
            $file = $request->file('video');
            $name = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $name);
            $update->media = $name;
        }
        $update->title = $request->title;
        $update->description = $request->description;
        $update->mediaType = "video";
        $update->cate_id = $request->category;
        $update->save();
        // Insert New Tag and Old tag
        $update->tags()->delete();
        $tags = $request->tags;
        if(isset($tags) && is_array($tags) && count($tags) > 0){
            $oldtags = [];
            $newtags = [];
            foreach($tags as $key => $tag){
                if(is_numeric($tag)){
                    $oldtags[] = $tag;
                }else{
                    $newtags[] = $tag;
                }
            }
            #insert new tag
            if(count( $newtags ) > 0){
                foreach($newtags as $newtag){
                    $dbtag = tag::where('name', $newtag)->first();
                    if(!isset($dbtag)){
                        $oldtags[] = tag::insertGetId([
                            'name'=> $newtag,
                            'created_at' => \Carbon\Carbon::now(),
                            'updated_at' => \Carbon\Carbon::now()
                        ]);
                    }
                }
            }
            //save all user input tag
            if(count( $oldtags ) > 0){
                foreach($oldtags as $oldtag){
                    $update->tags()->save(new PostTag([
                        'tag_id' => $oldtag,
                        'type' => 'video'
                    ]));
                }
            }
        }
        // Guest save
        $update->guests()->delete();
        $guests = $request->guests;
        if(count($guests) > 0){
            $oldguests = [];
            foreach($guests as $key => $guest){
                if(is_numeric($guest)){
                    $oldguests[] = $guest;
                }
            }
            //save all user input tag
            if(count( $oldguests ) > 0){
                foreach($oldguests as $oldguest){
                    $update->guests()->save(new GuestPost([
                        'guest_id' => $oldguest,
                        'type' => 'video'
                    ]));
                }
            }
        }
        return redirect()->route('admin.videos');
    }

    /** Delete a video (files and pivot rows are left behind). */
    public function admin_delete_videos($id)
    {
        $delete_videos = Media::findOrfail($id);
        $delete_videos->delete();
        return back();
    }
    # CRUD PODCAST
    /** Podcast listing page (Media rows with mediaType 'podcast'). */
    public function admin_podcasts()
    {
        $podcasts = media::where('mediaType', 'podcast')->get();
        return view('admin.pages.podcast.podcast', compact('podcasts'));
    }

    /** "Add podcast" form with categories, tags and guests for selectors. */
    public function admin_show_add_podcasts()
    {
        $cates = category::all();
        $tags = tag::all();
        $guests = Guest::all();
        return view('admin.pages.podcast.add_podcast', compact('tags', 'cates', 'guests'));
    }

    /**
     * Create a podcast Media row with cover + audio uploads, then attach
     * tags (pivot type 'podcast') and guests.
     * NOTE(review): unlike the video path, mediaType is never set here and
     * the guest pivot carries no 'type' — presumably relies on a column
     * default; confirm against the media/guest_posts migrations.
     */
    public function admin_insert_podcasts(Request $request)
    {
        $this->validate($request, [
            'title' => 'required',
            'image' => 'required|image',
            'podcast' => 'required',
            'description' => 'required',
            'category' => 'required',
            'guests' => 'required'
        ]);
        $save = new Media;
        // Insert Photos
        if ($request->hasFile('image')) {
            $file = $request->file('image');
            $names = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $names);
            $save->image = $names;
        }
        // Insert Podcast
        if ($request->hasFile('podcast')) {
            $file = $request->file('podcast');
            $name = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $name);
            $save->media = $name;
        }
        $save->title = $request->title;
        $save->description = $request->description;
        $save->cate_id = $request->category;
        $save->save();
        // Insert New Tag and Old tag
        $tags = $request->tags;
        if(isset($tags) && is_array($tags) && count($tags) > 0){
            $oldtags = [];
            $newtags = [];
            foreach($tags as $key => $tag){
                if(is_numeric($tag)){
                    $oldtags[] = $tag;
                }else{
                    $newtags[] = $tag;
                }
            }
            #insert new tag
            if(count( $newtags ) > 0){
                foreach($newtags as $newtag){
                    $dbtag = tag::where('name', $newtag)->first();
                    if(!isset($dbtag)){
                        $oldtags[] = tag::insertGetId([
                            'name'=> $newtag,
                            'created_at' => \Carbon\Carbon::now(),
                            'updated_at' => \Carbon\Carbon::now()
                        ]);
                    }
                }
            }
            //save all user input tag
            if(count( $oldtags ) > 0){
                foreach($oldtags as $oldtag){
                    $save->tags()->save(new PostTag([
                        'tag_id' => $oldtag,
                        'type' => 'podcast'
                    ]));
                }
            }
        }
        // Guest save
        $guests = $request->guests;
        if(count($guests) > 0){
            $oldguests = [];
            foreach($guests as $key => $guest){
                if(is_numeric($guest)){
                    $oldguests[] = $guest;
                }
            }
            //save all user input tag
            if(count( $oldguests ) > 0){
                foreach($oldguests as $oldguest){
                    $save->guests()->save(new GuestPost([
                        'guest_id' => $oldguest
                    ]));
                }
            }
        }
        return redirect()->route('admin.podcast');
    }

    /** "Edit podcast" form with the row and selector data. */
    public function getUpdatePodcast($id){
        $update = Media::findOrfail($id);
        $cates = category::all();
        $tags = tag::all();
        $guests = Guest::all();
        return view('admin.pages.podcast.update', compact('update', 'tags', 'cates', 'guests'));
    }

    /**
     * Update a podcast: files replaced only when re-uploaded; tag and guest
     * pivot rows are wiped and re-created from the submission.
     */
    public function updatePodcast(Request $request, $id)
    {
        $this->validate($request, [
            'title' => 'required',
            'description' => 'required',
            'category' => 'required',
            'guests' => 'required'
        ]);
        $update = Media::findOrfail($id);
        // Insert Photos
        if ($request->hasFile('image')) {
            $file = $request->file('image');
            $names = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $names);
            $update->image = $names;
        }
        // Insert Podcast
        if ($request->hasFile('podcast')) {
            $file = $request->file('podcast');
            $name = time() . $file->getClientOriginalName();
            $file->move(public_path() . '/admins/media/', $name);
            $update->media = $name;
        }
        $update->title = $request->title;
        $update->description = $request->description;
        $update->cate_id = $request->category;
        $update->save();
        // Insert New Tag and Old tag
        $update->tags()->delete();
        $tags = $request->tags;
        if(isset($tags) && is_array($tags) && count($tags) > 0){
            $oldtags = [];
            $newtags = [];
            foreach($tags as $key => $tag){
                if(is_numeric($tag)){
                    $oldtags[] = $tag;
                }else{
                    $newtags[] = $tag;
                }
            }
            #insert new tag
            if(count( $newtags ) > 0){
                foreach($newtags as $newtag){
                    $dbtag = tag::where('name', $newtag)->first();
                    if(!isset($dbtag)){
                        $oldtags[] = tag::insertGetId([
                            'name'=> $newtag,
                            'created_at' => \Carbon\Carbon::now(),
                            'updated_at' => \Carbon\Carbon::now()
                        ]);
                    }
                }
            }
            //save all user input tag
            if(count( $oldtags ) > 0){
                foreach($oldtags as $oldtag){
                    $update->tags()->save(new PostTag([
                        'tag_id' => $oldtag,
                        'type' => 'podcast'
                    ]));
                }
            }
        }
        // Guest save
        $update->guests()->delete();
        $guests = $request->guests;
        if(count($guests) > 0){
            $oldguests = [];
            foreach($guests as $key => $guest){
                if(is_numeric($guest)){
                    $oldguests[] = $guest;
                }
            }
            //save all user input tag
            if(count( $oldguests ) > 0){
                foreach($oldguests as $oldguest){
                    $update->guests()->save(new GuestPost([
                        'guest_id' => $oldguest
                    ]));
                }
            }
        }
        return redirect()->route('admin.podcast');
    }

    /** Delete a podcast. */
    public function admin_delete_podcasts($id)
    {
        $delete_podcast = Media::findOrfail($id);
        $delete_podcast->delete();
        return back();
    }
    // Guest
    /** Guest listing page. */
    public function getGuest(){
        $guests = Guest::all();
        return view('admin.pages.guest.guest', compact('guests'));
    }

    /** "Add guest" form. */
    public function getAddGuest(){
        return view('admin.pages.guest.add');
    }

    /** Create a guest with name, position and a required photo. */
    public function insertGuest(Request $request){
        $this->validate($request, [
            'name' => 'required',
            'position' => 'required',
            'photo' => 'required|image'
        ]);
        if($request->hasFile('photo')){
            $file = $request->file('photo');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/guest/', $name);
            $save = new Guest;
            $save->photo = $name;
            $save->name = $request->name;
            $save->position = $request->position;
            $save->save();
            return redirect()->route('admin.getGuest');
        }
    }

    /** Delete a guest. */
    public function deleteGuest($id){
        $delete = Guest::findOrfail($id);
        $delete->delete();
        return back();
    }

    /** "Edit guest" form. */
    public function getUpdateGuest($id){
        $update = Guest::findOrfail($id);
        return view('admin.pages.guest.update', compact('update'));
    }

    /** Update a guest; the photo is replaced only when re-uploaded. */
    public function updateGuest(Request $request, $id){
        $this->validate($request, [
            'name' => 'required',
            'position' => 'required'
        ]);
        if($request->hasFile('photo')){
            $file = $request->file('photo');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/admins/guest/', $name);
            $save = Guest::findOrfail($id);
            $save->photo = $name;
            $save->name = $request->name;
            $save->position = $request->position;
            $save->save();
            return redirect()->route('admin.getGuest');
        }else{
            $save = Guest::findOrfail($id);
            $save->name = $request->name;
            $save->position = $request->position;
            $save->save();
            return redirect()->route('admin.getGuest');
        }
    }
    // user
    /** Member-user listing page (admins excluded). */
    public function getUsers(){
        $users = User::where('Usertype', 'member')->get();
        return view('admin.pages.user.users', compact('users'));
    }

    /** "Add user" form. */
    public function addUsers(){
        return view('admin.pages.user.add');
    }

    /**
     * Create a member account from the admin area. The account is only
     * saved when password and confirmation match; profile image optional.
     */
    public function userSignup(Request $request){
        $this->validate($request, [
            'name' => 'required',
            'email' => 'required:email',
            'password' => '<PASSWORD>'
        ]);
        if($request->password == $request->confirmpassword){
            $save = new User;
            $save->name = $request->name;
            $save->email = $request->email;
            $save->Usertype = 'member';
            $save->password = <PASSWORD>($request->password);
            if($request->hasfile('profile')){
                $file = $request->file('profile');
                $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
                $file->move(public_path() . '/home/images/', $name);
                $save->profile = $name;
            }
            $save->save();
            return redirect()->route('getUsers');
        }
    }

    /** "Edit user" form. */
    public function getUpdateUser($id){
        $user = User::findOrfail($id);
        return view('admin.pages.user.update', compact('user'));
    }

    /**
     * Update a member account: name/email always; profile image when a file
     * is uploaded with no password change; password when both fields match.
     * NOTE(review): a profile upload combined with a password change takes
     * the early-return branch and skips the password update — confirm this
     * is intended.
     */
    public function userUpdate(Request $request, $id){
        $this->validate($request, [ 'name' => 'required', 'email' => 'required']);
        $update = User::findOrFail($id);
        $update->name = $request->name;
        $update->email = $request->email;
        if($request->hasFile('profile') && $request->password == '' && $request->confirmpassword == ''){
            $file = $request->file('profile');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/home/images/', $name);
            $update->profile = $name;
            $update->save();
            return redirect()->route('getUsers');
        }
        if($request->password == $request->confirmpassword && !$request->password == '' && !$request->confirmpassword == ''){
            $update->password = Hash::make($request->confirmpassword);
            $update->save();
        }
        $update->save();
        return redirect()->route('getUsers');
    }

    /** Delete a member account. */
    public function userDelete($id){
        $delete = User::findOrfail($id);
        $delete->delete();
        return back();
    }

    /** Detail view for one contact-form submission. */
    public function contactDetail($id){
        $contact = formcontact::findOrfail($id);
        return view('admin.pages.contactdetail', compact('contact'));
    }
}
<file_sep>/routes/admin.php
<?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::prefix('admin')->middleware('auth')->group(function () {
Route::get('/', 'AdminController@index');
Route::get('/type','AdminController@types')->name('admin.type');
Route::get('/admin_blog','AdminController@admin_blogs')->name('admin.blog');
Route::get('/admin_magazine','AdminController@admin_magazines')->name('admin.magazine');
Route::get('/admin_logo_website','AdminController@admin_logo_websites')->name('admin.logo.web');
Route::get('/admin_form_contact','AdminController@admin_form_contacts')->name('admin.form.contact');
Route::get('/admin_contact','AdminController@admin_contacts')->name('admin.contact');
Route::get('/contactdetail/{id}', 'AdminController@contactDetail')->name('contactDetail');
Route::post('/insert_type','AdminController@insert_types')->name('admin.insert.type');
Route::post('/edit_type/{id}','AdminController@edit_typess')->name('admin.edits.type');
Route::post('/delete_type/{id}','AdminController@delete_types')->name('admin.delete.type');
Route::post('/insert_blog','AdminController@insert_blogs')->name('admin.insert.blog');
Route::get('/news-add', 'AdminController@getAddNews')->name('admin.getAddNews');
Route::get('/news-update/{id}', 'AdminController@getUpdateNews')->name('admin.getUpdateNews');
Route::post('/edit_blog/{id}','AdminController@edit_blogs')->name('admin.edit.blog');
Route::post('/delete_blog/{id}','AdminController@delete_blogs')->name('admin.delete.blog');
Route::post('/insert_magazine','AdminController@insert_magazines')->name('admin.insert.magazine');
Route::get('/add-magazine', 'AdminController@getAddMagazine')->name('admin.getAddMagazine');
Route::get('/update-magazine/{id}', 'AdminController@getUpdateMagazine')->name('admin.getUpdateMagazine');
Route::post('/edit_magazine/{id}','AdminController@edit_magazines')->name('admin.edit.magazine');
Route::get('/delete_magazine/{id}','AdminController@delete_magazines')->name('admin.delete.magazine');
Route::post('/insert_web_logo','AdminController@insert_web_logos')->name('admin.insert.web.logo');
Route::post('/edit_web_logo/{id}','AdminController@edit_web_logos')->name('admin.edit.web.logo');
// Contact-form submissions and the site's contact details.
Route::post('/insert_formcontact','AdminController@insert_formcontacts')->name('admin.insert.formcontact');
Route::post('/edit_formcontact/{id}','AdminController@edit_formcontacts')->name('admin.edit.formcontact');
Route::post('/delete_formcontact/{id}','AdminController@delete_formcontacts')->name('admin.delete.formcontact');
Route::post('/insert_contact','AdminController@insert_contacts')->name('admin.insert.contact');
Route::post('/edit_contact/{id}','AdminController@edit_contacts')->name('admin.edit.contact');
// Tag management (CRUD).
Route::get('/show_tag','AdminController@show_tags')->name('admin.show.tag');
Route::post('/insert_tag','AdminController@insert_tags')->name('admin.insert.tag');
Route::post('/edit_tag/{id}','AdminController@edit_tags')->name('admin.edit.tag');
Route::post('/delete_tag/{id}','AdminController@delete_tags')->name('admin.delete.tag');
// Video management (CRUD).
Route::get('/video','AdminController@admin_videos')->name('admin.videos');
Route::get('/add-video','AdminController@getAddVideo')->name('admin.getAddVideo');
Route::post('/admin_insert_video','AdminController@admin_insert_videos')->name('admin.insert.videos');
Route::get('/update-video/{id}','AdminController@getUpdateVideo')->name('admin.getUpdateVideo');
Route::post('/update-video/{id}','AdminController@updateVideo')->name('admin.updateVideo');
Route::post('/admin_delete_video/{id}','AdminController@admin_delete_videos')->name('admin.delete.videos');
// Podcast management (CRUD). NOTE(review): delete uses GET — confirm CSRF exposure is acceptable.
Route::get('/admin_podcast','AdminController@admin_podcasts')->name('admin.podcast');
Route::get('/admin_show_add_podcast','AdminController@admin_show_add_podcasts')->name('admin.show.add_podcast');
Route::post('/admin_insert_podcast','AdminController@admin_insert_podcasts')->name('admin.insert.podcast');
Route::get('/podcast-update/{id}','AdminController@getUpdatePodcast')->name('admin.getUpdatePodcast');
Route::post('/podcast-update/{id}','AdminController@updatePodcast')->name('admin.updatePodcast');
Route::get('/admin_delete_podcast/{id}','AdminController@admin_delete_podcasts')->name('admin.delete.podcast');
// User
Route::get('/users', 'AdminController@getUsers')->name('getUsers');
Route::get('/add-uers', 'AdminController@addUsers')->name('addUsers');
Route::post('/add-users', 'AdminController@userSignup')->name('userSignup');
Route::get('/update-user/{id}', 'AdminController@getUpdateUser')->name('getUpdateUser');
Route::post('/update-user/{id}', 'AdminController@userUpdate')->name('userUpdate');
Route::get('delete-user/{id}', 'AdminController@userDelete')->name('userDelete');
// Guest
Route::get('/guest', 'AdminController@getGuest')->name('admin.getGuest');
Route::get('/add-guest', 'AdminController@getAddGuest')->name('admin.getAddGuest');
Route::post('/add-guest', 'AdminController@insertGuest')->name('admin.insertGuest');
Route::post('/delete-guest/{id}', 'AdminController@deleteGuest')->name('admin.deleteGuest');
Route::get('/update-guest/{id}', 'AdminController@getUpdateGuest')->name('admin.getUpdateGuest');
Route::post('update-guest/{id}', 'AdminController@updateGuest')->name('admin.updateGuest');
//Reports
// "*-report" routes render HTML listings; "*-report-generate" routes build the PDF download.
Route::get('/user-report', 'ReportController@userReport')->name('userReport');
Route::get('/user-report-generate', 'ReportController@userReportGenerate')->name('userReportGenerate');
Route::get('/podcast-report-generate', 'ReportController@podcastReportGenerate')->name('podcastReportGenerate');
Route::get('/video-report-generate', 'ReportController@videoReportGenerate')->name('videoReportGenerate');
Route::get('/magazine-report-generate', 'ReportController@magazineReportGenerate')->name('magazineReportGenerate');
Route::get('/topdownload-report-generate', 'ReportController@topdownloadReportGenerate')->name('topdownloadReportGenerate');
Route::get('/toplike-report-generate', 'ReportController@toplikeReportGenerate')->name('toplikeReportGenerate');
Route::get('/podcast-report', 'ReportController@podcastReport')->name('podcastReport');
Route::get('/video-report', 'ReportController@videoReport')->name('videoReport');
Route::get('/magazine-report', 'ReportController@magazineReport')->name('magazineReport');
Route::get('/top-download', 'ReportController@topDownload')->name('topDownload');
Route::get('/top-like', 'ReportController@likeReport')->name('likeReport');
});
<file_sep>/app/Http/Controllers/HomeController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Media, App\tag, App\category;
use App\Guest, App\GuestPost;
use App\blog;
use App\contact, App\formcontact;
use Mail;
use App\User;
use Illuminate\Support\Facades\Hash;
use Session;
use App\Comment;
use Auth;
use App\Like;
use App\magazine;
use App\Download;
class HomeController extends Controller
{
    /**
     * Landing page: latest media item, newest video, three newest podcasts,
     * one featured podcast and the four newest magazines.
     */
    public function index()
    {
        $latest = Media::orderBy('id', 'desc')->first();
        $videos = Media::where('mediaType', 'video')->orderBy('id', 'desc')->first();
        $podcasts = Media::where('mediaType', 'podcast')->limit(3)->orderBy('id', 'desc')->get();
        $feature = Media::where('mediaType', 'podcast')->orderBy('id', 'desc')->first();
        $magazines = magazine::limit(4)->orderBy('id', 'desc')->get();
        return view('home.pages.index', compact('podcasts', 'videos', 'magazines', 'feature', 'latest'));
    }
    /** Static "about" page. */
    public function about()
    {
        return view('home.pages.about');
    }
    /** Paginated, searchable podcast listing. */
    public function episodes(Request $request)
    {
        $tags = tag::all();
        $cates = category::all();
        $podcasts = $this->searchMedia($request, 'podcast');
        return view('home.pages.episodes', compact('podcasts', 'tags', 'cates'));
    }
    /**
     * Single podcast episode page.
     *
     * @param int $id  Media id; also used to look up the guests on the episode.
     */
    public function episode($id)
    {
        $episode = GuestPost::where('post_id', $id)->get();
        $media = Media::where('id', $id)->first();
        $tags = tag::all();
        $cates = category::all();
        // Flag consumed by the view to reload after a login popup.
        Session::flash('login_reload', 'yes');
        return view('home.pages.episode', compact('episode', 'tags', 'cates','media'));
    }
    /** Paginated, searchable blog listing. */
    public function blog(Request $request)
    {
        $tags = tag::all();
        $cates = category::all();
        $blogs = $this->searchMedia($request, 'blog');
        return view('home.pages.compose.blog', compact('blogs', 'tags', 'cates'));
    }
    /** Single blog post, plus every other post for the sidebar. */
    public function read($id){
        $read = blog::findOrFail($id);
        $blogs = blog::where('id', '!=', $id)->get();
        return view('home.pages.compose.read', compact('read', 'blogs'));
    }
    /** Contact page showing the site's stored contact details. */
    public function contact()
    {
        $contact = contact::first();
        return view('home.pages.contact', compact('contact'));
    }
    /** Paginated, searchable video listing. */
    public function videos(Request $request){
        $tags = tag::all();
        $cates = category::all();
        $videos = $this->searchMedia($request, 'video');
        return view('home.pages.videos', compact('videos', 'tags', 'cates'));
    }
    /** Single video page with its guests. */
    public function video($id){
        $video = GuestPost::where('post_id', $id)->get();
        $media = Media::where('id', $id)->first();
        $tags = tag::all();
        $cates = category::all();
        return view('home.pages.video', compact('video', 'tags', 'cates', 'media'));
    }
    /**
     * Persist a contact-form submission and forward it by e-mail.
     * Redirects back to the contact page on success.
     */
    public function contactForm(Request $request){
        $this->validate($request, [
            'name' => 'required',
            'email' => 'required',
            'subject' => 'required',
            'message' => 'required'
        ]);
        $save = new formcontact;
        $save->name = $request->name;
        $save->email = $request->email;
        $save->subject = $request->subject;
        $save->message = $request->message;
        $save->save();
        $data = [];
        $data['name'] = $save->name;
        $data['email'] = $save->email;
        $data['subject'] = $save->subject;
        // "message" is reserved inside Mail::send closures, hence "messagebody".
        $data['messagebody'] = $save->message;
        Mail::send('home.mail', $data, function ($message) use ($data) {
            $message->from('<EMAIL>');
            $message->to('<EMAIL>');
            $message->subject('Message from 108Megaheard user.');
        });
        return back();
    }
    /** Member signup form. */
    public function getSignup(){
        return view('home.pages.member.signup');
    }
    /**
     * Register a new member, optionally storing an uploaded profile photo,
     * then log the new account in.  Rejects mismatching password fields.
     */
    public function memberSignup(Request $request){
        $this->validate($request, [
            'name' => 'required',
            // FIX: was 'required:email|unique:users' — the ':' turned the whole
            // tail into a parameter of "required" instead of separate rules.
            'email' => 'required|email|unique:users',
            'password' => 'required'
        ]);
        if($request->password == $request->confirmpassword){
            $save = new User;
            $save->name = $request->name;
            $save->email = $request->email;
            $save->Usertype = 'member';
            $save->password = Hash::make($request->password);
            if($request->hasfile('profile')){
                $file = $request->file('profile');
                $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
                $file->move(public_path() . '/home/images/', $name);
                $save->profile = $name;
            }
            $save->save();
            auth()->login($save);
            return back();
        }
        // Lao: "passwords do not match".
        return back()->withErrors(['error'=> 'ລະຫັດຜ່ານບໍ່ກົງກັນ']);
    }
    /** Store a comment by the authenticated user on the given post. */
    public function postComment(Request $request){
        $this->validate($request, [
            'comment' => 'required'
        ]);
        $post = new Comment;
        $post->comment = $request->comment;
        $post->post_id = $request->post_id;
        $post->user_id = Auth::user()->id;
        $post->save();
        return back();
    }
    /**
     * Download a media file (login required); each download is recorded
     * for the "top downloads" report.
     */
    public function getDonwload($id)
    {
        if(Auth::check()){
            $download = Media::where('id', '=', $id)->firstOrFail();
            $save = new Download;
            $save->user_id = Auth::user()->id;
            $save->post_id = $id;
            $save->save();
            $pathToFile= public_path()."/admins/media/". $download->media;
            return response()->download($pathToFile);
        }else{
            // FIX: withErrors('error', '…') treated the message as the error-bag
            // key; the array form puts the Lao "please log in first" message
            // under the 'error' key as the rest of the controller does.
            return back()->withErrors(['error' => 'ກະລຸນາເຂົ້າສູ່ລະບົບກ່ອນຈຶ່ງສາມາດ Download ໄດ້']);
        }
    }
    /** Profile page for the authenticated member. */
    public function getProfile(){
        $tags = tag::all();
        $cates = category::all();
        $profile = User::where('id', Auth::user()->id)->first();
        return view('home.pages.member.member', compact('tags', 'cates', 'profile'));
    }
    /** Profile edit form. */
    public function getProfileUpdate($id){
        $update = User::findOrFail($id);
        return view('home.pages.member.update', compact('update'));
    }
    /**
     * Update name/e-mail, and optionally the profile photo or the password.
     * A photo upload with both password fields empty short-circuits; a
     * password change only happens when both fields are filled and match.
     */
    public function profileUpdate(Request $request, $id){
        $this->validate($request, [ 'name' => 'required', 'email' => 'required']);
        $update = User::findOrFail($id);
        $update->name = $request->name;
        $update->email = $request->email;
        if($request->hasFile('profile') && $request->password == '' && $request->confirmpassword == ''){
            $file = $request->file('profile');
            $name = md5(date('Y-m-d h:m:s') . microtime()) . time() . '_attach_.' . $file->getClientOriginalExtension();
            $file->move(public_path() . '/home/images/', $name);
            $update->profile = $name;
            $update->save();
            return redirect()->route('getProfile');
        }
        if($request->password == $request->confirmpassword && !$request->password == '' && !$request->confirmpassword == ''){
            $update->password = Hash::make($request->confirmpassword);
        }
        // Single save covers both the password branch and the plain
        // name/e-mail update (previously saved twice in the password path).
        $update->save();
        return redirect()->route('getProfile');
    }
    // Implementing Like
    /** AJAX helper: 'true'/'false' string — has the current user liked post $id? */
    public function isLikedByMe($id)
    {
        $post = Media::findOrFail($id);
        $like = Like::where('user_id', Auth::id())->where('post_id', $post->id)->first();
        if (isset($like)){
            return 'true';
        }
        return 'false';
    }
    /**
     * Toggle the current user's like on a post (soft-deleting restores on
     * re-like).  Returns JSON with the new like count.
     */
    public function like(Request $request)
    {
        $post = Media::find($request->id);
        if(!isset( $post ) || !Auth::check()){
            return ['success' => false];
        }
        $existing_like = Like::withTrashed()->wherePostId($post->id)->whereUserId(Auth::id())->first();
        if (is_null($existing_like)) {
            Like::create([
                'post_id' => $post->id,
                'user_id' => Auth::id()
            ]);
        } else {
            if (is_null($existing_like->deleted_at)) {
                $existing_like->delete();
            } else {
                $existing_like->restore();
            }
        }
        $likeCount = Like::where('post_id', $post->id)->get()->count();
        return ['success' => true, 'count' => $likeCount];
    }
    /** Paginated, searchable magazine listing. */
    public function getMagazine(Request $request){
        $tags = tag::all();
        $cates = category::all();
        $magazines = $this->searchMedia($request, 'mag');
        return view('home.pages.magazine.magazines', compact('tags', 'cates', 'magazines'));
    }
    /** Single magazine viewer page. */
    public function viewMagazine($id){
        $mag = magazine::findOrfail($id);
        return view('home.pages.magazine.magazine', compact('mag'));
    }
    /**
     * Shared listing/search: returns a 6-per-page paginator for the given
     * media type, filtered by the optional ?query= term.
     *
     * @param string $type  One of 'mag', 'podcast', 'video', 'blog'.
     */
    public function searchMedia(Request $request, $type){
        $query = $request->get('query');
        switch ($type) {
            case 'mag':
                $mags = magazine::orderBy('id', 'desc')->paginate(6);
                if(!empty($query)){
                    $mags = magazine::orderBy('id', 'desc');
                    $mags->where('issue', 'like', "%{$query}%");
                    $mags = $mags->paginate(6);
                }
                return $mags;
                break;
            case 'podcast':
                $podcasts = Media::where('mediaType', 'podcast')->orderBy('id', 'desc')->paginate(6);
                if(!empty($query)){
                    $podcasts = Media::where('mediaType', 'podcast')->orderBy('id', 'desc');
                    $podcasts->where('title', 'like', "%{$query}%");
                    $podcasts = $podcasts->paginate(6);
                }
                return $podcasts;
                break;
            case 'video':
                $videos = Media::where('mediaType', 'video')->orderBy('id', 'desc')->paginate(6);
                if(!empty($query)){
                    $videos = Media::where('mediaType', 'video')->orderBy('id', 'desc');
                    $videos->where('title', 'like', "%{$query}%");
                    $videos = $videos->paginate(6);
                }
                return $videos;
                break;
            case 'blog':
                $blogs = blog::orderBy('id', 'desc')->paginate(6);
                if(!empty($query)){
                    $blogs = blog::orderBy('id', 'desc');
                    $blogs->where('title', 'like', "%{$query}%");
                    $blogs = $blogs->paginate(6);
                }
                return $blogs;
                break;
            default:
                # code...
                break;
        }
    }
}
<file_sep>/app/Http/Controllers/ReportController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\User;
use App\Media;
use App\magazine;
use App\Download;
use DB, PDF;
class ReportController extends Controller
{
    /** Every report action is restricted to admin-level users. */
    public function __construct()
    {
        $this->middleware('usermiddleware:super_admin,admin');
    }
    /**
     * Render a report blade into a PDF and send it as a download whose
     * filename is "<prefix>-<random hash>.pdf".
     *
     * @param string $view   Blade view to render.
     * @param array  $data   View data.
     * @param string $prefix Filename prefix (also seeds the random hash).
     */
    private function downloadPdf($view, array $data, $prefix)
    {
        $document = PDF::loadView($view, $data);
        return $document->download($prefix . '-' . md5(uniqid($prefix, true)) . '.pdf');
    }
    /** HTML listing of all members. */
    public function userReport(){
        $members = User::where('Usertype', 'member')->get();
        return view('admin.pages.report.user.user', ['users' => $members]);
    }
    /** PDF download of the member listing. */
    public function userReportGenerate(){
        $members = User::where('Usertype', 'member')->get();
        return $this->downloadPdf('admin.pages.report.user.user-generate', ['users' => $members], 'user-report');
    }
    /** HTML listing of all podcasts. */
    public function podcastReport(){
        $episodes = Media::where('mediaType', 'podcast')->get();
        return view('admin.pages.report.podcast.podcast', ['podcasts' => $episodes]);
    }
    /** PDF download of the podcast listing. */
    public function podcastReportGenerate(){
        $episodes = Media::where('mediaType', 'podcast')->get();
        return $this->downloadPdf('admin.pages.report.podcast.podcast-generate', ['podcasts' => $episodes], 'podcast-report');
    }
    /** HTML listing of all videos. */
    public function videoReport(){
        $clips = Media::where('mediaType', 'video')->get();
        return view('admin.pages.report.video.video', ['videos' => $clips]);
    }
    /** PDF download of the video listing. */
    public function videoReportGenerate(){
        $clips = Media::where('mediaType', 'video')->get();
        return $this->downloadPdf('admin.pages.report.video.video-generate', ['videos' => $clips], 'video-report');
    }
    /** HTML listing of all magazines. */
    public function magazineReport(){
        $issues = magazine::all();
        return view('admin.pages.report.magazine.magazine', ['mags' => $issues]);
    }
    /** PDF download of the magazine listing. */
    public function magazineReportGenerate(){
        $issues = magazine::all();
        return $this->downloadPdf('admin.pages.report.magazine.magazine-generate', ['mags' => $issues], 'magazine-report');
    }
    /** HTML ranking of media by download count (most downloaded first). */
    public function topDownload(){
        $ranking = Media::select('media.*', DB::raw('count(downloads.post_id) as download_count'))->leftJoin('downloads', 'downloads.post_id', 'media.id')->groupBy('media.id')->orderBy('download_count', 'desc')->get();
        return view('admin.pages.report.ranking.topdownload', ['media' => $ranking]);
    }
    /** PDF download of the download ranking. */
    public function topdownloadReportGenerate(){
        $ranking = Media::select('media.*', DB::raw('count(downloads.post_id) as download_count'))->leftJoin('downloads', 'downloads.post_id', 'media.id')->groupBy('media.id')->orderBy('download_count', 'desc')->get();
        return $this->downloadPdf('admin.pages.report.ranking.topdownload-generate', ['media' => $ranking], 'topdownload-report');
    }
    /** HTML ranking of media by like count (most liked first). */
    public function likeReport(){
        $ranking = Media::select('media.*', DB::raw('count(likes.post_id) as like_count'))->leftJoin('likes', 'likes.post_id', 'media.id')->groupBy('media.id')->orderBy('like_count', 'desc')->get();
        return view('admin.pages.report.like.toplike', ['media' => $ranking]);
    }
    /** PDF download of the like ranking. */
    public function toplikeReportGenerate(){
        $ranking = Media::select('media.*', DB::raw('count(likes.post_id) as like_count'))->leftJoin('likes', 'likes.post_id', 'media.id')->groupBy('media.id')->orderBy('like_count', 'desc')->get();
        return $this->downloadPdf('admin.pages.report.like.toplike-generate', ['media' => $ranking], 'toplike-report');
    }
}
<file_sep>/database/migrations/2019_05_04_041212_create_guest_posts_table.php
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateGuestPostsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Pivot table linking media posts to guests, with a type discriminator.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('guest_posts', function (Blueprint $table) {
            $table->bigIncrements('id');
            $table->bigInteger('post_id')->unsigned();
            $table->bigInteger('guest_id')->unsigned();
            // FIX: was ->defaullt('podcast'); the typo is silently swallowed by
            // the Fluent column definition, so no default was ever applied.
            $table->enum('type', ['podcast', 'video'])->default('podcast');
            $table->foreign('guest_id')->references('id')->on('guests')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('post_id')->references('id')->on('media')->onUpdate('cascade')->onDelete('cascade');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        // FIX: the closure previously took no parameter, so $table was
        // undefined at rollback; dropForeign now receives arrays so the
        // constraints are resolved by column name, matching up().
        Schema::table('guest_posts', function (Blueprint $table) {
            $table->dropForeign(['guest_id']);
            $table->dropForeign(['post_id']);
        });
        Schema::dropIfExists('guest_posts');
    }
}
<file_sep>/routes/home.php
<?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', 'HomeController@index');
Route::get('/about', 'HomeController@about')->name('about');
Route::get('/episodes', 'HomeController@episodes')->name('episodes');
Route::get('/episode/{id}', 'HomeController@episode')->name('episode');
Route::get('/blog', 'HomeController@blog')->name('blog');
Route::get('/contact', 'HomeController@contact')->name('contact');
Route::post('/contactform', 'HomeController@contactForm')->name('contactForm');
Route::get('/videos', 'HomeController@videos')->name('videos');
Route::get('/video/{id}', 'HomeController@video')->name('video');
Route::get('/read/{id}', 'HomeController@read')->name('read');
Route::get('/signup', 'HomeController@getSignup')->name('getSignup')->middleware('guest');
Route::post('/signup', 'HomeController@memberSignup')->name('memberSignup');
Route::post('/comment', 'HomeController@postComment')->name('postComment');
Route::get('/download/{id}', 'HomeController@getDonwload')->name('getDonwload');
Route::get('/profile', 'HomeController@getProfile')->name('getProfile');
Route::get('/profile/{id}', 'HomeController@getProfileUpdate')->name('getProfileUpdate');
Route::post('/update-profile/{id}', 'HomeController@profileUpdate')->name('profileUpdate');
Route::get('/magazines', 'HomeController@getMagazine')->name('getMagazine');
Route::get('/magazine/{id}', 'HomeController@viewMagazine')->name('viewMagazine');
Route::get('/search', 'HomeController@searchMedia')->name('searchMedia');
// Login
Route::get('/admin/login', 'Auth\LoginController@getLogin')->name('getLogin');
Route::post('/login', 'Auth\LoginController@login')->name('login');
Route::get('/logout', 'Auth\LoginController@logout')->name('logout');
// End Login
// Stream MP3
// FIX: the route pattern was mangled; streamSong($filename) requires a
// {filename} route parameter to locate the file under /admins/media/.
Route::get('/stream/{filename}', 'ServeFileController@streamSong')->name('streamSong');
// Like
Route::get('post/{id}/islikedbyme', 'HomeController@isLikedByMe');
Route::post('post/like', 'HomeController@like');
<file_sep>/app/blog.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class blog extends Model
{
    /** Backing table for blog posts. */
    protected $table = 'blogs';
    /** Tag pivot rows attached to this post. */
    public function tags(){
        return $this->hasMany(PostTag::class, 'post_id');
    }
    /**
     * JSON array of the tag ids attached to this post, used to pre-select
     * tags in edit forms.
     */
    public function selectedTag(){
        $ids = [];
        foreach ($this->tags as $pivot) {
            $ids[] = $pivot->tag_id;
        }
        return json_encode($ids);
    }
}
<file_sep>/app/tag.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class tag extends Model
{
    // Backing table for tags.
    protected $table = 'tags';
    /** Video/tag pivot rows referencing this tag. */
    public function videos_tag()
    {
        return $this->hasMany('App\videos_tag', 'tag_id');
    }
    /** Podcast/tag pivot rows referencing this tag. */
    public function podcast_tag()
    {
        return $this->hasMany('App\podcast_tag', 'tag_id');
    }
}
<file_sep>/app/Guest.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Guest extends Model
{
    // Backing table for show guests.
    protected $table = 'guests';
    /** GuestPost pivot rows linking this guest to media posts. */
    public function guests(){
        return $this->hasMany('App\GuestPost', 'guest_id');
    }
}
<file_sep>/app/webimage.php
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
// Eloquent model for site images stored in the "webimages" table.
class webimage extends Model
{
    protected $table = 'webimages';
}
<file_sep>/config/pdf.php
<?php
// laravel-mpdf configuration: page geometry, output metadata and the
// custom font set (Latin + several Lao faces) used by the PDF reports.
return [
    'mode' => 'utf-8',
    'format' => 'A4',
    'author' => '',
    'subject' => '',
    'keywords' => '',
    // Margins in millimetres.
    'margin_header' => 0,
    'margin_footer' => 5,
    'margin_top' => 10,
    'margin_bottom' => 20,
    'creator' => '108Recruit Pdf',
    'display_mode' => 'fullpage',
    // NOTE(review): tempDir points one level above the project root — confirm
    // this directory exists and is writable in deployment.
    'tempDir' => base_path('../temp/'),
    'font_path' => base_path('public/pdf-fonts'),
    // 'R' = regular face, 'B' = bold face; useOTL/useKashida enable the
    // OpenType layout features complex scripts (Lao) need to render correctly.
    'font_data' => [
        'latoregular' => [
            'R' => "Lato-Regular.ttf",
            'B' => 'Lato-Bold.ttf',
        ],
        'phetsarathot' => [
            'R' => 'phetsarath_ot.ttf',
            'B' => 'phetsarath_ot_bold_new.ttf',
            'useOTL' => 0xFF,
            'useKashida' => 75,
        ],
        'saysetthot' => [
            'R' => 'saysettha_OT.ttf',
            'B' => 'saysettha_OT.ttf',
        ],
        "dhyanalao" => [/* Lao fonts */
            'R' => 'Dhyana-Regular.ttf',
            'B' => "Dhyana-Bold.ttf",
            'useOTL' => 0xFF,
        ],
    ]
];
<file_sep>/app/Http/Controllers/ServeFileController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Response, Request as RequestStatic;
/**
 * Serves media files with HTTP Range support (single range only), so
 * browsers can seek inside audio/video without downloading the whole file.
 */
class ServeFileController extends Controller
{
    /**
     * Stream a file inline (Content-Disposition: inline), honouring a
     * single-range Range header for seeking.  Writes headers/body directly
     * and exits; never returns on success.
     *
     * @param string $contentType MIME type sent to the client.
     * @param string $path        Absolute path of the file on disk.
     * @param string $filename    Name advertised to the client.
     */
    public function streamFile($contentType, $path, $filename) {
        if ( ! file_exists($path)) {
            // NOTE(review): 201 (Created) for a missing file looks wrong —
            // presumably 404 was intended; confirm before changing clients.
            return response(['message' => 'the file does not exists.'], $status = 201);
        }
        header('Content-type: ' . $contentType);
        header('Content-Disposition: inline; filename="' . $filename . '"');
        $fp = @fopen($path, 'rb');
        $size = filesize($path); // File size
        $length = $size; // Content length
        $start = 0; // Start byte
        $end = $size - 1; // End byte
        // Now that we've gotten so far without errors we send the accept range header
        /* At the moment we only support single ranges.
         * Multiple ranges requires some more work to ensure it works correctly
         * and comply with the spesifications: http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.2
         *
         * Multirange support annouces itself with:
         * header('Accept-Ranges: bytes');
         *
         * Multirange content must be sent with multipart/byteranges mediatype,
         * (mediatype = mimetype)
         * as well as a boundry header to indicate the various chunks of data.
         */
        //header("Accept-Ranges: 0-$length");
        header('Accept-Ranges: bytes');
        // multipart/byteranges
        // http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.2
        $range = RequestStatic::header('Range');
        if (isset($range)){
            $c_start = $start;
            $c_end = $end;
            // Extract the range string
            list(, $range) = explode('=', $range, 2);
            // Make sure the client hasn't sent us a multibyte range
            if (strpos($range, ',') !== false){
                // (?) Shoud this be issued here, or should the first
                // range be used? Or should the header be ignored and
                // we output the whole content?
                header('HTTP/1.1 416 Requested Range Not Satisfiable');
                header("Content-Range: bytes $start-$end/$size");
                // (?) Echo some info to the client?
                exit;
            } // fim do if
            // If the range starts with an '-' we start from the beginning
            // If not, we forward the file pointer
            // And make sure to get the end byte if spesified
            // FIX: $range{0} (curly-brace string offset) was deprecated in
            // PHP 7.4 and removed in PHP 8 — use bracket syntax.
            if ($range[0] == '-'){
                // The n-number of the last bytes is requested
                $c_start = $size - substr($range, 1);
            } else {
                $range = explode('-', $range);
                $c_start = $range[0];
                $c_end = (isset($range[1]) && is_numeric($range[1])) ? $range[1] : $size;
            } // fim do if
            /* Check the range and make sure it's treated according to the specs.
             * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
             */
            // End bytes can not be larger than $end.
            $c_end = ($c_end > $end) ? $end : $c_end;
            // Validate the requested range and return an error if it's not correct.
            if ($c_start > $c_end || $c_start > $size - 1 || $c_end >= $size){
                header('HTTP/1.1 416 Requested Range Not Satisfiable');
                header("Content-Range: bytes $start-$end/$size");
                // (?) Echo some info to the client?
                exit;
            } // fim do if
            $start = $c_start;
            $end = $c_end;
            $length = $end - $start + 1; // Calculate new content length
            fseek($fp, $start);
            header('HTTP/1.1 206 Partial Content');
        } // fim do if
        // Notify the client the byte range we'll be outputting
        header("Content-Range: bytes $start-$end/$size");
        header("Content-Length: $length");
        //remove header
        header_remove("X-Powered-By");
        // Start buffered download
        $buffer = 1024 * 8;
        while(!feof($fp) && ($p = ftell($fp)) <= $end){
            if ($p + $buffer > $end){
                // In case we're only outputtin a chunk, make sure we don't
                // read past the length
                $buffer = $end - $p + 1;
            } // fim do if
            set_time_limit(0); // Reset time limit for big files
            echo fread($fp, $buffer);
            flush(); // Free up memory. Otherwise large files will trigger PHP's memory limit.
        } // fim do while
        fclose($fp);
        exit;
    }
    /**
     * Same range-aware byte serving as streamFile(), but sent as an
     * attachment (forces a download dialog).  Never returns on success.
     *
     * @param string $contentType      MIME type sent to the client.
     * @param string $path             Absolute path of the file on disk.
     * @param string $downloadFilename Filename suggested to the client.
     */
    public function downloadFile($contentType, $path, $downloadFilename) {
        if ( ! file_exists($path) ) {
            // NOTE(review): 401 (Unauthorized) for a missing file — presumably
            // 404 was intended; confirm callers before changing.
            return response()->json(['errors' => ['error' => ['the file does not exists.' ] ]], $status = 401);
        }
        header('Content-type: ' . $contentType);
        $fp = @fopen($path, 'rb');
        $size = filesize($path); // File size
        $length = $size; // Content length
        $start = 0; // Start byte
        $end = $size - 1; // End byte
        // Now that we've gotten so far without errors we send the accept range header
        /* At the moment we only support single ranges.
         * Multiple ranges requires some more work to ensure it works correctly
         * and comply with the spesifications: http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.2
         *
         * Multirange support annouces itself with:
         * header('Accept-Ranges: bytes');
         *
         * Multirange content must be sent with multipart/byteranges mediatype,
         * (mediatype = mimetype)
         * as well as a boundry header to indicate the various chunks of data.
         */
        //header("Accept-Ranges: 0-$length");
        header('Accept-Ranges: bytes');
        // multipart/byteranges
        // http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.2
        $range = RequestStatic::header('Range');
        if (isset($range)){
            $c_start = $start;
            $c_end = $end;
            // Extract the range string
            list(, $range) = explode('=', $range, 2);
            // Make sure the client hasn't sent us a multibyte range
            if (strpos($range, ',') !== false){
                // (?) Shoud this be issued here, or should the first
                // range be used? Or should the header be ignored and
                // we output the whole content?
                header('HTTP/1.1 416 Requested Range Not Satisfiable');
                header("Content-Range: bytes $start-$end/$size");
                // (?) Echo some info to the client?
                exit;
            } // fim do if
            // If the range starts with an '-' we start from the beginning
            // If not, we forward the file pointer
            // And make sure to get the end byte if spesified
            // FIX: $range{0} (curly-brace string offset) was deprecated in
            // PHP 7.4 and removed in PHP 8 — use bracket syntax.
            if ($range[0] == '-'){
                // The n-number of the last bytes is requested
                $c_start = $size - substr($range, 1);
            } else {
                $range = explode('-', $range);
                $c_start = $range[0];
                $c_end = (isset($range[1]) && is_numeric($range[1])) ? $range[1] : $size;
            } // fim do if
            /* Check the range and make sure it's treated according to the specs.
             * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
             */
            // End bytes can not be larger than $end.
            $c_end = ($c_end > $end) ? $end : $c_end;
            // Validate the requested range and return an error if it's not correct.
            if ($c_start > $c_end || $c_start > $size - 1 || $c_end >= $size){
                header('HTTP/1.1 416 Requested Range Not Satisfiable');
                header("Content-Range: bytes $start-$end/$size");
                // (?) Echo some info to the client?
                exit;
            } // fim do if
            $start = $c_start;
            $end = $c_end;
            $length = $end - $start + 1; // Calculate new content length
            fseek($fp, $start);
            header('HTTP/1.1 206 Partial Content');
        } // fim do if
        // Notify the client the byte range we'll be outputting
        header("Content-Range: bytes $start-$end/$size");
        header("Content-Length: $length");
        header("Content-Disposition: attachment; filename=" . urlencode($downloadFilename));
        header("Content-Description: File Transfer");
        //remove header
        header_remove("X-Powered-By");
        flush(); // this doesn't really matter.
        // Start buffered download
        $buffer = 1024 * 8;
        while(!feof($fp) && ($p = ftell($fp)) <= $end){
            if ($p + $buffer > $end){
                // In case we're only outputtin a chunk, make sure we don't
                // read past the length
                $buffer = $end - $p + 1;
            } // fim do if
            set_time_limit(0); // Reset time limit for big files
            echo fread($fp, $buffer);
            flush(); // Free up memory. Otherwise large files will trigger PHP's memory limit.
        } // fim do while
        fclose($fp);
        exit;
    }
    /** Stream a video from public/home/videos as video/<type>. */
    public function streamVideo(Request $request, $filename, $type){
        $path = public_path('/home/videos/' . $filename);
        return $this->streamFile("video/$type", $path, $filename);
    }
    /** Stream an MP3 from public/admins/media as audio/mpeg. */
    public function streamSong(Request $request, $filename){
        $path = public_path('/admins/media/' . $filename);
        return $this->streamFile("audio/mpeg", $path, $filename);
    }
}
| 85f8044a81264cd41d561238e6993d39d039ea3a | [
"PHP"
] | 14 | PHP | Phoud/podcast | 0e2da899974b467c4e41ce37505428ad915e70b8 | b50aa174f4597c7460f56ca62d0f5877f96bdd17 |
refs/heads/master | <repo_name>prathameshtal/iOS-Swift-LoginApp<file_sep>/LoginApp/LoginViewController.swift
//
// LoginViewController.swift
// LoginApp
//
// Created by <NAME> on 04/07/16.
// Copyright © 2016 <NAME>. All rights reserved.
//
import UIKit
/// Login screen: validates the entered credentials against the hard-coded
/// pair and, on success, persists a login flag and segues to the welcome view.
class LoginViewController: UIViewController {

    // Hard-coded reference credentials (demo app — no backend).
    let storedUsername = "qwerty";
    let storedPassword = "<PASSWORD>";

    @IBOutlet weak var usernameTextField: UITextField!
    @IBOutlet weak var passwordTextField: UITextField!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Validates the form and either logs the user in or shows an alert
    /// describing the first problem found.
    @IBAction func submitButtonTapped(sender: AnyObject) {
        let username = usernameTextField.text;
        // FIX: this line was corrupted (`let password = <PASSWORD>;`), which
        // does not compile — restored to read the password text field,
        // mirroring the username line above.
        let password = passwordTextField.text;
        var errorMessage = "";
        var isError = false;
        if(isEmptyFields(username!, password: password!)){
            isError = true;
            errorMessage = "Fields cannot be empty";
        } else if(!isValidUserName(username!) || !isCorrectUserName(username!)){ // Wrong username
            if(!isValidUserName(username!)){
                errorMessage = "Minimum 6 charaters required";
            } else if(!isCorrectPassword(password!)){ // Wrong password
                errorMessage = "Wrong Username & Password";
            } else {
                errorMessage = "Wrong Username"
            }
            isError = true;
        } else if(!isCorrectPassword(password!)){ // Wrong password
            errorMessage = "Wrong Password";
            isError = true;
        } else { // Login is successful
            saveCredentialsAndOpenWelcomeScreen();
            isError = false;
        }
        if(isError){
            showMessageAlertDialog(errorMessage)
        }
    }

    /// True when either field is empty.
    func isEmptyFields(username: String, password: String) -> Bool {
        return username.characters.count == 0 || password.characters.count == 0;
    }

    /// True when the username meets the minimum length (6 characters).
    func isValidUserName(username: String) -> Bool {
        return username.characters.count >= 6;
    }

    /// True when the username matches the stored one.
    func isCorrectUserName(username: String) -> Bool {
        return username == storedUsername;
    }

    /// True when the password matches the stored one.
    func isCorrectPassword(password: String) -> Bool {
        return password == storedPassword;
    }

    /// Persists the "logged in" flag and performs the welcome segue.
    func saveCredentialsAndOpenWelcomeScreen(){
        NSUserDefaults.standardUserDefaults().setBool(true, forKey: "isUserLoggedIn");
        NSUserDefaults.standardUserDefaults().synchronize();
        self.performSegueWithIdentifier("welcomeView", sender: self)
    }

    /// Shows a dismissable alert titled "Login" with the given message.
    func showMessageAlertDialog(errorMessage: String){
        let alertController = UIAlertController(title: "Login", message:
            errorMessage, preferredStyle: UIAlertControllerStyle.Alert)
        alertController.addAction(UIAlertAction(title: "Dismiss", style: UIAlertActionStyle.Default,handler: nil))
        self.presentViewController(alertController, animated: true, completion: nil)
    }

    /*
    // MARK: - Navigation
    // In a storyboard-based application, you will often want to do a little preparation before navigation
    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        // Get the new view controller using segue.destinationViewController.
        // Pass the selected object to the new view controller.
    }
    */
}
| 058cfbd20953efd31a809f78b4be9933cbf2ffa0 | [
"Swift"
] | 1 | Swift | prathameshtal/iOS-Swift-LoginApp | 48cef46e9d00ef449175e7c3a8636af15c1b8df9 | 7c14cc361691371373fca4c3bddbe122c2957b7f |
refs/heads/master | <repo_name>Serg-Maximchuk/eshop<file_sep>/src/com/mydomain/shoppingcart/service/impl/ShoppingManager.java
package com.mydomain.shoppingcart.service.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.mydomain.shoppingcart.bo.Basket;
import com.mydomain.shoppingcart.bo.Item;
import com.mydomain.shoppingcart.service.ShoppingService;
/**
* @author Ross
*/
/**
 * In-memory implementation of {@link ShoppingService}; the catalogue is
 * stub data standing in for a database query.
 *
 * @author Ross
 */
public class ShoppingManager implements ShoppingService {

    private Basket basket;
    private Item item;

    /** Returns the six-item stub catalogue (a real version would hit the DB). */
    public Collection<Item> findItems() {
        List<Item> catalogue = new ArrayList<Item>();
        catalogue.add(new Item(1l, "Candy Cotton", "Candy coated milky tarts", 8.50d));
        catalogue.add(new Item(2l, "Jelly Beans", "Jelly icecream waffle cream", 18.99d));
        catalogue.add(new Item(3l, "Jam Doughnut", "Strawberry jam and Christmas pudding", 23.00d));
        catalogue.add(new Item(4l, "Mallow Madness", "Marshmellow wrap", 8.50d));
        catalogue.add(new Item(5l, "Chocolate Cheese", "Crunchy chocolate creamy cheese", 17.50d));
        catalogue.add(new Item(6l, "Custard Crazy", "Custard sauce with jelly and cream", 13.55d));
        return catalogue;
    }

    public Basket getBasket() {
        return basket;
    }

    public Item getItem() {
        return item;
    }

    public void setBasket(Basket theBasket) {
        this.basket = theBasket;
    }

    public void setItem(Item theItem) {
        this.item = theItem;
    }
}<file_sep>/README.md
# eshop
An internet shop.
In development — coming soon.
<file_sep>/src/com/mydomain/shoppingcart/bo/Basket.java
package com.mydomain.shoppingcart.bo;
import java.util.ArrayList;
import java.util.List;
/**
* @author Ross
*/
public class Basket {
    private long id;
    // Backing list; lazily created by getItems() so it is never observed null.
    private List<Item> items;

    /** Puts one item into the basket. */
    public void addItem(Item item) {
        getItems().add(item);
    }

    /** Discards all contents by swapping in a fresh list. */
    public void empty() {
        setItems(new ArrayList<Item>());
    }

    /** Sums the prices of every item currently held. */
    public double getBalance() {
        double total = 0;
        for (Item entry : getItems()) {
            total += entry.getPrice();
        }
        return total;
    }

    public long getId() {
        return id;
    }

    /** Number of items currently in the basket. */
    public int getItemCount() {
        return getItems().size();
    }

    /** Lazily creates the backing list so this getter never returns null. */
    public List<Item> getItems() {
        if (items == null) {
            items = new ArrayList<Item>();
        }
        return items;
    }

    /** Removes the given item, if present. */
    public void removeItem(Item item) {
        getItems().remove(item);
    }

    public void setId(long theId) {
        id = theId;
    }

    public void setItems(List<Item> theItems) {
        items = theItems;
    }
}<file_sep>/src/com/mydomain/shoppingcart/view/ShoppingViewHelper.java
package com.mydomain.shoppingcart.view;
import java.util.ArrayList;
import java.util.List;
import com.mydomain.shoppingcart.bo.Basket;
import com.mydomain.shoppingcart.bo.Item;
import com.mydomain.shoppingcart.service.ShoppingService;
import com.mydomain.shoppingcart.service.impl.ShoppingManager;
/**
* @author Ross
*/
// View-layer helper: mediates between the UI and the shopping service,
// lazily creating both the basket and the service on first use.
public class ShoppingViewHelper {
private Basket basket;
private ShoppingService shoppingManager;
// Adds an item to the (lazily created) basket.
public void addItemToBasket(Item item) {
getBasket().addItem(item);
}
// Copies the service's catalogue into a concrete list for the view.
public List<Item> findItems() {
return new ArrayList<Item>(getShoppingManager().findItems());
}
// Lazy init: never returns null.
public Basket getBasket() {
if (basket == null) {
basket = new Basket();
}
return basket;
}
// Lazy init of the default service implementation; never returns null.
public ShoppingService getShoppingManager() {
if (shoppingManager == null) {
shoppingManager = new ShoppingManager();
}
return shoppingManager;
}
// Removes an item from the basket, if present.
public void removeItemFromBasket(Item item) {
getBasket().removeItem(item);
}
public void setBasket(Basket basket) {
this.basket = basket;
}
public void setShoppingManager(ShoppingService shoppingManager) {
this.shoppingManager = shoppingManager;
}
}<file_sep>/src/com/mydomain/shoppingcart/bo/Item.java
package com.mydomain.shoppingcart.bo;
/**
* @author Ross
*/
/* Simple value bean describing one catalogue entry. */
public class Item {
    private String description;
    private long id;
    private String name;
    private double price;

    /** No-arg constructor for bean/framework instantiation. */
    public Item() {}

    /** Fully-initialising constructor. */
    public Item(long id, String name, String description, double price) {
        this.id = id;
        this.name = name;
        this.description = description;
        this.price = price;
    }

    public String getDescription() {
        return description;
    }

    public long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public double getPrice() {
        return price;
    }

    public void setDescription(String newDescription) {
        description = newDescription;
    }

    public void setId(long newId) {
        id = newId;
    }

    public void setName(String newName) {
        name = newName;
    }

    public void setPrice(double newPrice) {
        price = newPrice;
    }
}
| 027b5a46af3a159f506db1487705adcacb3c7511 | [
"Markdown",
"Java"
] | 5 | Java | Serg-Maximchuk/eshop | 80d635d5f01304a69038ed330120a8ded47af083 | 64abb9b2b332ec43c01efc4c1d7311af4deeeabc |
refs/heads/master | <repo_name>agricenko/rest-vs-graphql<file_sep>/README.md
# rest-vs-graphql
Rest vs GraphQL compare project
Knowledge Sharing session plan: https://docs.google.com/document/d/1qlgGcKIzXnWAr_qYMzrG1a6FESFormsl_04fQe-99Iw/edit
<file_sep>/src/main/java/com/palata22/demo/model/Customer.java
package com.palata22.demo.model;
import lombok.*;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import java.time.LocalDateTime;
@Builder(toBuilder = true)
@NoArgsConstructor(access = AccessLevel.PACKAGE)
@AllArgsConstructor(access = AccessLevel.PACKAGE)
@Getter
@Setter
@Entity
// JPA entity for a shop customer; accessors/constructors are Lombok-generated.
// NOTE: do not reorder fields -- the generated all-args constructor (used by
// @Builder) follows declaration order.
public class Customer {
@Id
@GeneratedValue
// Surrogate primary key, generated by the database.
private Long id;
@Column
private String firstName;
@Column
private String lastName;
@Column
// Creation timestamp -- presumably set by the caller on insert; TODO confirm.
private LocalDateTime createDate;
}
<file_sep>/src/main/java/com/palata22/demo/config/HalBrowserLinkConfiguration.java
package com.palata22.demo.config;
import com.palata22.demo.rest.DepartmentRestController;
import com.palata22.demo.rest.ProjectRestController;
import com.palata22.demo.rest.UserRestController;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.rest.webmvc.RepositoryLinksResource;
import org.springframework.hateoas.ResourceProcessor;
import org.springframework.hateoas.mvc.ControllerLinkBuilder;
@Slf4j
@Configuration
// Registers extra top-level links so the hand-written @RestController endpoints
// appear alongside Spring Data REST's auto-exported repository links in the
// HAL browser's root resource.
public class HalBrowserLinkConfiguration {
@Bean
// NOTE(review): method name has a typo ("Broser"); renaming would change the
// derived bean name, so it is left as-is and only flagged here.
public ResourceProcessor<RepositoryLinksResource> halBroserLinks() {
return resource -> {
resource.add(ControllerLinkBuilder.linkTo(ProjectRestController.class).withRel("projects2"));
// linkTo(methodOn(...)) builds a link from a controller method signature;
// the null argument is only a placeholder for link construction.
resource.add(ControllerLinkBuilder.linkTo(ControllerLinkBuilder.methodOn(ProjectRestController.class).getProjectUsers(null)).withRel("projects2/{id}/users"));
resource.add(ControllerLinkBuilder.linkTo(DepartmentRestController.class).withRel("departments2"));
resource.add(ControllerLinkBuilder.linkTo(UserRestController.class).withRel("users2"));
return resource;
};
}
}
<file_sep>/src/main/java/com/palata22/demo/graphql/project/ProjectMutation.java
package com.palata22.demo.graphql.project;
import com.coxautodev.graphql.tools.GraphQLMutationResolver;
import com.palata22.demo.model.Customer;
import com.palata22.demo.model.Project;
import com.palata22.demo.repository.ProjectRepository;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Component;
@Component
@AllArgsConstructor
public class ProjectMutation implements GraphQLMutationResolver {
    private final ProjectRepository projectRepository;

    /** GraphQL mutation: create and persist a new project with the given name. */
    public Project newProject(String name) {
        final Project created = Project.builder()
                .name(name)
                .build();
        projectRepository.save(created);
        return created;
    }
}
<file_sep>/src/main/resources/application.properties
spring.jackson.serialization.fail-on-empty-beans=false
spring.jackson.serialization.write_dates_as_timestamps=false
server.port=9999<file_sep>/src/main/java/com/palata22/demo/graphql/QueryResolver.java
package com.palata22.demo.graphql;
import com.coxautodev.graphql.tools.GraphQLQueryResolver;
import com.coxautodev.graphql.tools.GraphQLResolver;
import com.palata22.demo.model.*;
import com.palata22.demo.repository.*;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.List;
@Component
@AllArgsConstructor
// Root GraphQL Query resolver: exposes one findAll* field per repository.
// NOTE(review): "TicketReporsitory" is a pre-existing typo in the repository
// type name; it is declared elsewhere, so only flagged here.
public class QueryResolver implements GraphQLQueryResolver{
private final DepartmentRepository departmentRepository;
private final ProjectRepository projectRepository;
private final UserRepository userRepository;
private final TicketReporsitory ticketReporsitory;
private final CustomerRepository customerRepository;
public List<Department> findAllDepartments() {
return departmentRepository.findAll();
}
public List<Project> findAllProjects() {
return projectRepository.findAll();
}
public List<User> findAllUsers() {
return userRepository.findAll();
}
public List<Ticket> findAllTickets() {
return ticketReporsitory.findAll();
}
// Returns Iterable (not List) because CrudRepository.findAll() does --
// presumably CustomerRepository is not a JpaRepository; TODO confirm.
public Iterable<Customer> findAllCustomers() {
return customerRepository.findAll();
}
}
<file_sep>/src/main/java/com/palata22/demo/graphql/customer/CustomerResolver.java
package com.palata22.demo.graphql.customer;
// Empty placeholder -- presumably intended to hold GraphQL field resolvers
// for Customer once needed; TODO confirm intent or remove.
public class CustomerResolver {
}
<file_sep>/src/main/java/com/palata22/demo/repository/UserRepository.java
package com.palata22.demo.repository;
import com.palata22.demo.model.User;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.rest.core.annotation.RestResource;
import org.springframework.stereotype.Repository;
@Repository
//@RestResource(exported = false)
// Spring Data JPA repository for User; CRUD operations are inherited from
// JpaRepository. Re-enabling the @RestResource line above would hide this
// repository from Spring Data REST's automatic export.
public interface UserRepository extends JpaRepository<User, Long> {
}
<file_sep>/src/main/java/com/palata22/demo/model/Ticket.java
package com.palata22.demo.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.*;
import javax.persistence.*;
@Builder(toBuilder = true)
@NoArgsConstructor(access = AccessLevel.PACKAGE)
@AllArgsConstructor(access = AccessLevel.PACKAGE)
@Getter
@Setter
@Entity
@JsonIgnoreProperties({"hibernateLazyInitializer", "handler"})
// JPA entity for an issue ticket. @JsonIgnoreProperties prevents Jackson from
// failing on Hibernate's lazy-loading proxy fields during serialisation.
// NOTE(review): id is int while sibling entities use Long; changing it would
// alter the Lombok-generated accessors, so it is only flagged here.
public class Ticket {
@Id
@GeneratedValue
private int id;
@Column
private String description;
// Owning project, fetched lazily (loaded only on access).
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name="project_id", nullable = false)
private Project project;
@ManyToOne
@JoinColumn(name="reporter_user_id", nullable = false)
private User reporterUser;
@ManyToOne
@JoinColumn(name="assignee_user_id", nullable = false)
private User assigneeUser;
}
<file_sep>/src/main/java/com/palata22/demo/graphql/user/UserQuery.java
package com.palata22.demo.graphql.user;
import com.coxautodev.graphql.tools.GraphQLQueryResolver;
import com.palata22.demo.model.User;
import com.palata22.demo.repository.UserRepository;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Component;
@Component
@AllArgsConstructor
public class UserQuery implements GraphQLQueryResolver {
    private final UserRepository userRepository;

    /**
     * GraphQL query: fetch a single user by primary key.
     * Throws NoSuchElementException (same type the former bare Optional.get()
     * raised) but now with a message identifying the missing id.
     */
    public User findUser(Long id) {
        return userRepository.findById(id)
                .orElseThrow(() -> new java.util.NoSuchElementException("No user found with id " + id));
    }
}
<file_sep>/src/main/java/com/palata22/demo/rest/DepartmentRestController.java
package com.palata22.demo.rest;
import com.palata22.demo.model.Department;
import com.palata22.demo.repository.DepartmentRepository;
import lombok.AllArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping("/rest/departments")
@AllArgsConstructor
// Hand-written Spring MVC endpoint; the "/rest" prefix distinguishes it from
// the Spring Data REST auto-exported repository endpoints.
public class DepartmentRestController {
private final DepartmentRepository departmentRepository;
// GET /rest/departments -> all departments serialised as JSON.
@GetMapping
public List<Department> getDepartments() {
return departmentRepository.findAll();
}
}
| 8185620813dadba8e2a6ae671cdf7716f1b738b8 | [
"Markdown",
"Java",
"INI"
] | 11 | Markdown | agricenko/rest-vs-graphql | 0be4fb42b1c94f62d8e52db86e0b07befbc0f551 | 764a565275082698d6876236f7bc519c59bd4552 |
refs/heads/master | <file_sep>import posixpath as pp
import urllib.request as urlrequest
import urllib.parse as urlparse
import os.path as osp
import argparse
import h5py
import numpy as np
from lxml import etree
from lxml.builder import ElementMaker
# Define XML namespaces to use...
ns = {'nc': 'http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2',
'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
# A map connecting group names with their XML elements...
grp_node = dict()
# Create element factory...
E = ElementMaker(namespace=ns['nc'], nsmap=ns)
nc_group = E.group
nc_var = E.variable
nc_attr = E.attribute
nc_dim = E.dimension
def ncml_dtype(tobj):
    """Translate an HDF5 datatype into NcML datatype information.

    :arg h5py.h5t.TypeID tobj: h5py.h5t.TypeID object.
    :return: tuple of (NcML datatype name, bool flag set when the HDF5
        integer type is unsigned).
    :rtype: tuple
    """
    cls_id = tobj.get_class()
    # Integers: NcML name depends on byte size; signedness is reported
    # separately because NcML has no unsigned primitive names.
    if cls_id == h5py.h5t.INTEGER:
        int_names = {1: 'byte', 2: 'short', 4: 'int', 8: 'long'}
        is_unsigned = tobj.get_sign() == h5py.h5t.SGN_NONE
        return (int_names[tobj.get_size()], is_unsigned)
    if cls_id == h5py.h5t.FLOAT:
        return ({4: 'float', 8: 'double'}[tobj.get_size()], False)
    # Classes with a direct NcML equivalent.
    direct = {h5py.h5t.STRING: 'String',
              h5py.h5t.OPAQUE: 'opaque',
              h5py.h5t.COMPOUND: 'Structure'}
    if cls_id in direct:
        return (direct[cls_id], False)
    # Classes NcML cannot represent at all.
    unsupported = {
        h5py.h5t.TIME: 'H5T_TIME datatype not supported in NcML',
        h5py.h5t.BITFIELD: 'H5T_BITFIELD datatype not supported in NcML',
        h5py.h5t.REFERENCE: 'H5T_REFERENCE datatype not supported in NcML',
        h5py.h5t.ARRAY: 'H5T_ARRAY datatype not supported in NcML',
    }
    if cls_id in unsupported:
        raise TypeError(unsupported[cls_id])
    # Classes that could be supported but are not implemented yet.
    not_yet = {
        h5py.h5t.ENUM: 'H5T_ENUM datatype in NcML not supported yet',
        h5py.h5t.VLEN: 'H5T_VLEN datatype in NcML not supported yet',
    }
    if cls_id in not_yet:
        raise NotImplementedError(not_yet[cls_id])
    raise ValueError('%s: Unknown type class' % cls_id)
def is_dimscale(attrs):
    """Check if a dataset is an HDF5 dimension scale.

    :arg h5py.AttributeManager attrs: Dataset's attributes (any mapping with
        ``items()`` works).
    :return: True when a ``CLASS`` attribute equals ``b'DIMENSION_SCALE'``.
    :rtype: bool
    """
    # NOTE(review): the original also looked for a REFERENCE_LIST attribute,
    # but its flag was initialized to True, making that check a no-op; the
    # dead logic is removed here with behavior preserved.
    for name, value in attrs.items():
        if name == 'CLASS' and value == b'DIMENSION_SCALE':
            return True
    return False
def do_attributes(elem, obj):
    """Append an NcML <attribute> element to *elem* for each HDF5 attribute
    of *obj*, skipping the bookkeeping attributes h5py uses for dimension
    scales. Multi-valued attributes are flattened to a space-joined string.
    """
    for aname, aval in obj.attrs.items():
        # Dimension-scale plumbing attributes are structural, not metadata.
        if aname in ('CLASS', 'REFERENCE_LIST', 'DIMENSION_LIST', 'NAME'):
            continue
        # Attribute's NcML datatype...
        aid = h5py.h5a.open(obj.id, aname.encode('utf-8'))
        atype, is_unsign = ncml_dtype(aid.get_type())
        # Process attribute's value...
        shape = aid.shape
        if atype == 'String':
            if len(shape) == 0:
                # Scalar attribute...
                aval = aval.decode('utf-8')
            else:
                # Array of strings: decode each element, join with spaces.
                temp = np.ravel(aval)
                aval = ' '.join([v.decode('utf-8') for v in temp])
        else:
            # Numeric (possibly multi-dimensional): stringify each element.
            temp = np.ravel(aval)
            aval = ' '.join([str(v) for v in temp])
        # Create <attribute> XML element...
        axml = nc_attr({'name': aname, 'type': atype, 'value': str(aval)})
        if is_unsign:
            # NcML has no unsigned types; record signedness as an XML attribute.
            axml.attrib['isUnsigned'] = 'true'
        # Attach <attribute> element to its XML parent...
        elem.append(axml)
def objinfo(name, obj):
    """Callback for the HDF5 object visitor.

    Builds the NcML element for each visited group/dataset and attaches it
    under its parent's element via the module-level grp_node registry.
    """
    if isinstance(obj, h5py.Group):
        if obj.name in grp_node:
            raise RuntimeError('%s: XML node already exists' % obj.name)
        # elem = etree.Element(nc_group, nsmap=ns,
        #                      attrib={'name': pp.basename(obj.name)})
        elem = nc_group({'name': pp.basename(obj.name)})
        # Register the group's element so children can attach to it later.
        grp_node[obj.name] = elem
    elif isinstance(obj, h5py.Dataset):
        # Is the dataset a dimension scale...
        if is_dimscale(obj.attrs):
            # Create <dimension> XML element...
            elem = nc_dim({'name': pp.basename(obj.name),
                           'length': str(obj.shape[0])})
            grp_node[obj.parent.name].append(elem)
            # Is the dataset just a netCDF dimension or should also be a netCDF
            # variable...
            if obj.attrs.get('NAME', b'').startswith(
                    b'This is a netCDF dimension but not a netCDF variable.'):
                return
        # Dataset's NcML datatype...
        dset_type, is_unsign = ncml_dtype(obj.id.get_type())
        # Dataset's shape: prefer the names of attached dimension scales,
        # falling back to raw extents when no DIMENSION_LIST exists.
        if 'DIMENSION_LIST' in obj.attrs:
            f = obj.file
            shape = list()
            for n in range(len(obj.shape)):
                dim_dset = f[obj.attrs['DIMENSION_LIST'][n][0]]
                shape.append(dim_dset.name)
        else:
            shape = [str(d) for d in obj.shape]
        # Create <variable> element...
        elem = nc_var({'name': pp.basename(obj.name), 'type': dset_type,
                       'shape': ' '.join(shape)})
        if is_unsign:
            # NcML convention: unsigned integers carry a _Unsigned attribute.
            elem.append(nc_attr({'name': '_Unsigned', 'value': 'true'}))
    else:
        raise TypeError('Unexpected HDF5 object: %s' % obj)
    # Attach the freshly built element under its parent group's element.
    grp_node[obj.parent.name].append(elem)
def h5toncml(h5fname):
    """Generate NcML representation of HDF5 file's content.

    Dataset values not included.

    :arg str h5fname: HDF5 file name.
    :return: An instance of lxml.etree.ElementTree representing the NcML
        content.
    """
    f = h5py.File(h5fname, 'r')
    # Create the root element and the document...
    root = etree.Element(etree.QName(ns['nc'], 'netcdf'), nsmap=ns)
    ncmldoc = etree.ElementTree(root)
    # Add the XML schema attributes...
    root.attrib[etree.QName(ns['xsi'], 'schemaLocation')] = \
        'http://www.unidata.ucar.edu/schemas/netcdf/ncml-2.2.xsd'
    # Add location attribute: the source file as an absolute file:// URL.
    root.attrib['location'] = urlparse.urljoin(
        'file:',
        urlrequest.pathname2url(osp.abspath(h5fname)))
    # Visit each HDF5 object; the root group's element is the document root.
    # NOTE: grp_node is module-level state, so this function is not reentrant.
    grp_node['/'] = root
    do_attributes(root, f)
    f.visititems(objinfo)
    f.close()
    return ncmldoc
def main():
    """CLI entry point: print the NcML for the HDF5 file named on the command line."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('h5f', help='HDF5 file name')
    arg_parser.add_argument('-x', '--xpath', help='NcML XPath statement')
    opts = arg_parser.parse_args()
    if opts.xpath:
        raise NotImplementedError('NcML XPath not yet supported')
    document = h5toncml(opts.h5f)
    # Serialize with declaration and indentation, then write to stdout.
    xml_text = etree.tostring(document,
                              pretty_print=True,
                              xml_declaration=True,
                              encoding='UTF-8').decode('utf-8')
    print(xml_text)
<file_sep>"""Convenience wrapper to run the program from the source tree."""
from h5ncml.h5ncml import main
if __name__ == '__main__':
    # Delegate straight to the package's CLI entry point.
    main()
<file_sep># h5ncml
Python command line program and module for generating NcML (netCDF XML) from HDF5 files.
**Note: Still in development.**
# Installation
$ python setup.py install
# Usage
Run:
$ python h5ncml.py --help
for more information how to use the program.
<file_sep>from setuptools import setup
VERSION = '0.1.0'
# with open("README.rst", "rb") as f:
# long_descr = f.read().decode("utf-8")
setup(
name='h5ncml',
packages=['h5ncml'],
entry_points={
'console_scripts': ['h5ncml = h5ncml.h5ncml:main']},
version=VERSION,
description=('Python command line application and module for generating '
'NcML from HDF5 files.'),
# long_description=long_descr,
author='<NAME>',
author_email='<EMAIL>',
install_requires=['numpy>=1.6.1', 'h5py>=2.5.0', 'lxml>=3.5.0']
)
<file_sep>from .h5ncml import main
main()
| 8d1aab8dc57aca585577823a95ad16865e9d27eb | [
"Markdown",
"Python"
] | 5 | Python | ajelenak-thg/h5ncml | d8128f3e4ef8f25712dc8afc7ae6ce675b0f4c5d | b97ef43cfb4e34af6bf6aaac277b8b0b1f8f76c7 |
refs/heads/master | <file_sep># react-painless-pagination
[](https://opensource.org/licenses/mit-license.php)
I wanted to see if I could create a drop in pagination component that "just worked" with minimal config. Goals are:
* SEO friendly (rel=prev, rel=next, etc)
* Smart enough to use window.location to determine current page if none is specified.
* Works even if total number of pages param isn't available.
* If react router is present, use it.
* Follows urls if no callback is specified.
* Common styles (bootstrap, foundation, material, et al) built in.
* small and fast.
This is WIP. Hope to have it done this weekend. Stay tuned!
## License
MIT
<file_sep>// index.test.js
import React from "react";
import PainlessPagination from "./index";
import renderer from "react-test-renderer";
import Enzyme, { shallow, render, mount } from "enzyme";
import sinon, { stub } from "sinon";
import Adapter from "enzyme-adapter-react-16";
// React 16 Enzyme Adapter
Enzyme.configure({ adapter: new Adapter() });
it("should render", () => {
const onStateSelect = stub();
const component = renderer.create(<PainlessPagination />);
const tree = component.toJSON();
expect(tree).toMatchSnapshot();
});
| 8c3ba27171401d24eb5eb8affba739d6218c6320 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | adamwysocki/react-painless-pagination | ed8107dd8f2ba6648bfccc965ddfa16fa994bb95 | ae1bd94b60eb01a2d4dc6359457c4b6934f50e8a |
refs/heads/master | <repo_name>imclab/schoolofdata-ext<file_sep>/requirements.txt
Django==1.6.1
dj-database-url==0.2.2
dj-static==0.0.5
gunicorn==18.0
psycopg2==2.5.2
static==0.4
wsgiref==0.1.2
git+https://github.com/mihi-tr/okbadgerclient.git
South==0.8.4
markdown
<file_sep>/README.md
# School of Data Ext
Extensions for the [School of Data](http://schoolofdata.org)
## Introduction
At the School of Data we need quite some extra things: Quizzes that are
able to issue badges once completed, feedbackforms that do the same etc.
This repository contains a giant django project (together with some
applications) to do exactly this.
## Contributing
There are several things you can do to help us grow this project
* Test and submit issues for bugs you find/features you wish it'd have
* Help us out and create nice style sheets for the quizzes and feedback
forms.
* Bring your Python and [Django](http://djangoproject.com) out and help
building the platform.
* Think of more Quizzes for courses on the [School of Data](http://schoolofdata.org/courses)
If you plan on contributing check out the [contribution guidelines](https://github.com/okfn/schoolofdata-ext/blob/master/doc/contribution-guidelines.md)
## Installing for Development
Scodaext is a python/[Django](http://djangoproject.com) project. To start a
development you'll need:
* virtualenv
* python2
First create a new virtualenv and activate it
```
virtualenv venv
source venv/bin/activate
```
Then install all dependencies for development
```
pip install -r requirements.dev.txt
```
Then initialize the database
```
DATABASE_URL=sqlite:///scodaext.sqlite python manage.py syncdb
DATABASE_URL=sqlite:///scodaext.sqlite python manage.py migrate
```
Finally run the server
```
DATABASE_URL=sqlite:///scodaext.sqlite honcho start
```
Point your browser to [localhost:5000](http://localhost:5000) to start off.
The admin interface is on [/admin](http://localhost:5000/admin), the
quizzes will be quiz/slug.
## Contact
Contact us:
* email to <EMAIL>
* join us on IRC: irc.freenode.org \#school-of-data
* twitter [@schoolofdata](https://twitter.com/schoolofdata)
<file_sep>/scodaext/urls.py
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
import scodaext.apps.simplequiz.urls
import scodaext.apps.feedbackform.urls
admin.autodiscover()  # register each installed app's ModelAdmins with the admin site
# Django 1.6-style URL configuration (patterns() was removed in Django 1.10;
# keep this form while requirements pin Django 1.6).
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'scodaquiz.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^',include(scodaext.apps.simplequiz.urls)),
    url(r'^feedback/',include(scodaext.apps.feedbackform.urls)),
    url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()  # serve static assets in development
| e51bee3b1b013a17641e5bb6e212bf08398ad812 | [
"Markdown",
"Python",
"Text"
] | 3 | Text | imclab/schoolofdata-ext | 32e921c9b22ec678f6aa35c780bf9348530a0b71 | 3d00ae011c76f08742da89474c1f8def7410be2c |
refs/heads/main | <file_sep>import pandas as pd
import os
import multiprocessing
import csv
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics import jaccard_score
# OUTPUT_FOLDER = '../MDA/Cosine/CountVectorizerBinary/Quarterly'
def get_previous(cik, df_p):
    """Return the prior-period MD&A text for *cik* from *df_p*, or None if absent."""
    matches = df_p.loc[df_p['cik'] == cik, 'mda']
    if matches.empty:
        return None
    return matches.iloc[0]
def compute_similarity(document1, document2, similarity_type, vectorizer_type, binary=False):
    """Score the similarity of two documents.

    vectorizer_type: 'tfidf' uses TfidfVectorizer; anything else (incl. 'count')
        falls back to CountVectorizer.
    similarity_type: 'jaccard' uses jaccard_score on dense 0/1 rows; anything
        else (incl. 'cosine') falls back to cosine similarity.
    """
    vec_cls = TfidfVectorizer if vectorizer_type == 'tfidf' else CountVectorizer
    vectorizer = vec_cls(binary=binary)
    vectorizer.fit([document1, document2])
    row1 = vectorizer.transform([document1])
    row2 = vectorizer.transform([document2])
    if similarity_type == 'jaccard':
        return jaccard_score(row1.toarray().tolist()[0], row2.toarray().tolist()[0])
    return cosine_similarity(row1, row2)[0][0]
def mp_worker(args):
    """Pool worker: unpack one 7-tuple of work and return [date, cik, similarity]."""
    (current_text, previous_text, date, cik,
     similarity_type, vectorizer_type, binary) = args
    score = compute_similarity(current_text, previous_text,
                               similarity_type, vectorizer_type, binary)
    return [date, cik, score]
def mp_handler(work, n_pools, output_dir):
    """Run mp_worker over *work* in an n_pools-process pool, appending one CSV
    row per non-None result to *output_dir* and printing a progress readout.
    """
    p = multiprocessing.Pool(n_pools)
    n_work = len(work)
    with open(output_dir, 'a', newline='') as f:
        writer = csv.writer(f)
        counter = 0
        # imap yields results in submission order as workers finish.
        for result in p.imap(mp_worker, work):
            counter += 1
            if result is not None:
                writer.writerow(result)
            # \r keeps the progress percentage on one console line.
            print(f'\rPercentage Complete: {round((counter / n_work) * 100, 2)}%', end="", flush=True)
    print('\n')
    p.close()
def get_yearly_change(directory, similarity_type, vectorizer_type, binary, output_dir):
    """For each quarterly MD&A CSV in *directory*, compare every firm's text
    against the same quarter one year earlier and write similarity scores.

    Assumes the directory listing is chronologically ordered so that index
    i-4 is the same quarter of the previous year -- TODO confirm ordering.
    """
    _, _, dirs = next(os.walk(directory))
    for i, d in enumerate(dirs):
        # Need a file four quarters back; skip the first year of data.
        if i - 4 < 0:
            continue
        # File names look like 10-K_<year>_<quarter>... (split on '_').
        quarter = d.split('_')[2][:4]
        year = d.split('_')[1]
        print(f'Working on {quarter} of {year}...')
        previous_dir = dirs[i - 4]
        df_c = pd.read_csv(f'{directory}\\{d}', header=None, names=['path', 'date', 'cik', 'mda'])
        df_p = pd.read_csv(f'{directory}\\{previous_dir}', header=None, names=['path', 'date', 'cik', 'mda'])
        # Join each firm's current MD&A with its year-ago MD&A by CIK.
        df_c['previous_mda'] = df_c['cik'].apply(lambda x: get_previous(x, df_p))
        # Drop firms missing either side of the comparison.
        df_c = df_c[~(df_c['mda'].isna()) & ~(df_c['previous_mda'].isna())]
        work = list(zip(df_c['mda'],
                        df_c['previous_mda'],
                        df_c['date'],
                        df_c['cik'],
                        [similarity_type] * len(df_c['mda']),
                        [vectorizer_type] * len(df_c['mda']),
                        [binary] * len(df_c['mda'])))
        output_directory = f'{output_dir}/10-K_{year}_{quarter}.csv'
        mp_handler(work, 6, output_directory)
if __name__ == "__main__":
get_yearly_change('C:\\Users\\jpetr\\PycharmProjects\\SEC-Analytics\\Text Extraction\\Extracted\\Quarterly',
'cosine', 'count', False, '../MDA/cosine/CountVectorizer/Quarterly')<file_sep># SEC-Analytics
Text analytics repository for SEC filings, including 10-Q's and 10-K's
Hello!
This was made by <NAME>.
<file_sep>import re
import os
import csv
import time
import pickle
import multiprocessing
import os
from gensim.models import KeyedVectors
import re
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import plot_confusion_matrix
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split, GridSearchCV
import numpy as np
import pickle
# Pre-trained random forests scoring candidate MD&A open/close boundaries.
# Loaded once at import time so pool workers inherit them.
OPEN_CLASSIFIER = pickle.load(open('../rf_quarterly_open_wv.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../rf_quarterly_close_wv.pkl', 'rb'))
# Word vectors used to featurize the text around candidate boundaries.
WV = KeyedVectors.load("mda.wordvectors", mmap='r')
def get_mda(file_text, clf_open, clf_close, wv, add_extra=False):
    """Extract the MD&A section of a filing with two boundary classifiers.

    clf_open/clf_close score candidate opening/closing character offsets;
    wv supplies word vectors for featurization. Returns the MD&A substring
    (plus 200 trailing chars when add_extra is True) or '' when no opening
    candidate exists.
    """
    # --- Opening boundary: score every "management"/"item" occurrence. ---
    open_probabilities = []
    open_indices = []
    management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
    item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    interested_locations = management_indices + item_indices
    for p in interested_locations:
        # Featurize a 1000-char window centred on the candidate offset.
        surrounding_text = file_text[p - 500: p + 500]
        features = get_open_features(surrounding_text, file_text, wv, p)
        X = [features]
        predicted_prob = clf_open.predict_proba(X)[0][1]
        open_probabilities.append(predicted_prob)
        open_indices.append(p)
    try:
        index = open_probabilities.index(max(open_probabilities))
    except ValueError:
        # No candidate offsets at all -> no MD&A found.
        return ''
    open_index = open_indices[index]
    # --- Closing boundary: consider newlines after the opening offset. ---
    close_probabilities = []
    close_indices = []
    newline_indices = [m.start() for m in
                       re.finditer('\n', file_text[open_index:], re.IGNORECASE)]
    interested_locations = []
    for n in newline_indices:
        # Keep newlines followed by section-boundary vocabulary, or near EOF.
        questionable_text = file_text[n + open_index: n + open_index + 500].lower()
        if 'item' in questionable_text:
            interested_locations.append(n + open_index)
        elif 'quantitative' in questionable_text:
            interested_locations.append(n + open_index)
        elif 'control' in questionable_text:
            interested_locations.append(n + open_index)
        elif n > len(file_text[open_index:]) - 1000:
            interested_locations.append(n + open_index)
        elif 'report' in questionable_text:
            interested_locations.append(n + open_index)
    found = False
    for p in interested_locations:
        surrounding_text = file_text[p - 500: p + 500]
        features = get_close_features_3(surrounding_text, file_text, wv, p)
        X = [features]
        # Skip windows whose features contain NaN (e.g. empty trailing text).
        if np.isnan(features).any():
            continue
        predicted_prob = clf_close.predict_proba(X)[0][1]
        # Greedy early exit on the first confident close boundary.
        if predicted_prob > 0.7:
            close_index = p
            found = True
            break
        close_indices.append(p)
        close_probabilities.append(predicted_prob)
    if not found:
        # NOTE(review): raises ValueError (max of empty list) / leaves
        # close_index unbound when no close candidates survive -- TODO confirm
        # callers never hit that path.
        index = close_probabilities.index(max(close_probabilities))
        close_index = close_indices[index]
    if add_extra:
        return file_text[open_index: close_index + 200]
    else:
        return file_text[open_index: close_index]
def modify_formatting(text):
    # Replace layout characters with literal word tokens so they survive
    # whitespace tokenization and can be looked up as word-vector entries
    # ("newline", "tab", "d_s" = double space, "p" = period).
    return text.replace('\n', ' newline ').replace('\t', ' tab ').replace('  ', ' d_s ').replace('.', ' p ')
def get_n_surrounding_words(text, n, trailing=True):
    """Return up to *n* whitespace-separated words adjacent to a boundary.

    With trailing=True the words following the first token are returned in
    order; with trailing=False the words preceding the last token are
    returned nearest-first. The boundary token itself is skipped.
    """
    tokens = text.split()
    if not trailing:
        tokens = tokens[::-1]
    return tokens[1:n + 1]
def get_close_features_3(surrounding_text, file_text, wv, index):
    """Featurize a candidate MD&A *closing* offset for CLOSE_CLASSIFIER.

    Builds per-dimension min/max and mean word-vector summaries of the 30
    words before / 40 words after the offset, plus counts of 'item' and
    blank-line markers and two document-level indicator flags.
    """
    #Test: 0.91/0.96
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    # Convert layout chars into lookup-able tokens before splitting.
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 40)
    # Per-dimension extrema over the 100-d embeddings (start at 0).
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    # Document-level flags: does the filing contain the standard follow-on
    # sections ("Quantitative and Qualitative...", "Controls and Procedures")?
    isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
    isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
    average_leading = [0] * 100
    leading_counter = 0
    for word in leading_words:
        try:
            embedding = wv[word]
            leading_counter += 1
            average_leading = np.add(average_leading, embedding)
        except KeyError:
            # Out-of-vocabulary word: contributes nothing.
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    # NOTE(review): divides even when leading_counter == 0, yielding NaNs
    # that get_mda filters out via np.isnan -- TODO confirm intentional.
    average_leading = np.divide(average_leading, leading_counter)
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    average_trailing = [0] * 100
    trailing_counter = 0
    for word in trailing_words:
        try:
            embedding = wv[word]
            trailing_counter += 1
            average_trailing = np.add(average_trailing, embedding)
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    if trailing_counter != 0:
        average_trailing = np.divide(average_trailing, trailing_counter)
    # item_count = len(surrounding_text.lower().split('item'))
    # Local structural cues: 'item' mentions and blank-line counts on each side.
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text), isQuantitativeInDocument, isControlsInDocument
    ] + list(average_leading) + list(average_trailing)
    return features
def get_open_features(surrounding_text, file_text, wv, index):
    """Featurize a candidate MD&A *opening* offset for OPEN_CLASSIFIER.

    Per-dimension min/max word-vector summaries of the 30 words on each side
    of the offset, plus 'item'/blank-line counts and the offset's relative
    position in the document. (No mean features, unlike the close variant.)
    """
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    # Convert layout chars into lookup-able tokens before splitting.
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 30)
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    for word in leading_words:
        try:
            embedding = wv[word]
        except KeyError:
            # Out-of-vocabulary word: contributes nothing.
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    for word in trailing_words:
        try:
            embedding = wv[word]
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    # item_count = len(surrounding_text.lower().split('item'))
    # Local structural cues around the candidate offset.
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text)
    ]
    return features
def mp_worker(args):
    """Pool worker: extract the MD&A section from one filing on disk.

    *args* is the file path; the CIK and filing date are parsed out of the
    path's '_'-separated name. Returns (file_name, date, cik, mda) with
    newlines escaped so the text fits a single CSV cell.
    """
    file_name = str(args)
    clf_open = OPEN_CLASSIFIER
    clf_close = CLOSE_CLASSIFIER
    # Close the handle deterministically (the original leaked it until GC).
    with open(file_name) as fh:
        file_text = fh.read()
    mda = get_mda(file_text, clf_open, clf_close, WV).replace('\n', '\\n')
    cik = file_name.split('_')[6]
    date = file_name.split('_')[2][-8:]
    return file_name, date, cik, mda
def mp_handler(file_names, n_pools, output_dir):
    """Run ``mp_worker`` over ``file_names`` in a process pool, appending rows to a CSV.

    Args:
        file_names: Sized iterable of filing paths handed to ``mp_worker``.
        n_pools: Number of worker processes.
        output_dir: Path of the CSV file results are appended to.
    """
    p = multiprocessing.Pool(n_pools)
    start = time.time()
    counter = 0
    # The output file was previously opened via csv.writer(open(...)) and never
    # closed; the context manager guarantees the buffer is flushed to disk.
    with open(output_dir, 'a', newline='') as out_file:
        writer = csv.writer(out_file)
        for result in p.imap(mp_worker, file_names):
            counter += 1
            print(f'\rPercentage Complete: {round((counter / len(file_names)) * 100, 2)}%', end="", flush=True)
            writer.writerow([result[0], result[1], result[2], result[3]])
    print('\n')
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
    p.close()
    p.join()  # reap the workers instead of leaking them until interpreter exit
if __name__ == '__main__':
    # Batch-extract 10-Q MD&A sections from the local EDGAR text dumps.
    # The year/quarter skips below appear to resume an interrupted run
    # (everything before 2017 QTR3 assumed done) -- confirm before reuse.
    path_data_list = ['D:\\SEC Filing Data\\10-X_C_2006-2010',
                      'D:\\SEC Filing Data\\10-X_C_2011-2015',
                      'D:\\SEC Filing Data\\10-X_C_2016-2018']
    for path_data in path_data_list:
        # First os.walk level yields the year sub-directories.
        _, years, _ = next(os.walk(path_data))
        for year in years:
            if int(year) < 2017:
                continue
            _, quarters, _ = next(os.walk(f'{path_data}\\{year}'))
            for quarter in quarters:
                if int(year) == 2017 and (quarter == 'QTR1' or quarter == 'QTR2'):
                    continue
                print(f'Working on {quarter} of {year}...')
                output_directory = f'Extracted\\Quarterly\\10-Q_{year}_{quarter}.csv'
                all_directories = []
                # Third walk level yields the individual filing text files.
                _, _, directories = next(os.walk(f'{path_data}\\{year}\\{quarter}'))
                for directory in directories:
                    # Keep only 10-Q filings; 10-K / 10-KSB etc. are skipped.
                    if '_10-Q_' in directory:
                        all_directories += [f'{path_data}\\{year}\\{quarter}\\' + directory]
mp_handler(all_directories, 4, output_directory)<file_sep>import os
import pickle
import re
import csv
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
# https://www.sec.gov/files/form10-q.pdf
# Directory of sample 10-Q filings used for supervised labelling.
PATH = '../../Data/10-Q Sample'
# Fallback pattern: everything between "Item 2." and "Item 3." (the MD&A in a 10-Q).
REGEX_10Q = r"(Item[\s]+?2\.[\s\S]*?)(Item[\s]+?3\.)"
# Feature names, in the exact column order the pickled classifiers were trained with.
OPEN_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline',
                              'leading_newline', 'total_size', 'regex_open', 'trailing_analysis']
CLOSE_INDEPENDENT_VARIABLES = ['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size',
                               'leading_tab', 'leading_spaces', 'regex_close', 'trailing_quantitative']
# Pre-trained random forests scoring candidate opening/closing boundaries.
# NOTE(review): loaded at import time; the pickle file handles are never closed.
OPEN_CLASSIFIER = pickle.load(open('../open_quarterly_random_forest.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../close_quarterly_random_forest.pkl', 'rb'))
def get_mda(clf_open, clf_close, file_text):
    """Locate the MD&A section (Item 2) of a 10-Q filing.

    Every case-insensitive occurrence of the word "item" is turned into a
    feature vector and scored by two pre-trained classifiers -- one for the
    section's opening boundary, one for its close.  The text between the
    best-scoring open/close pair is returned.

    Args:
        clf_open: fitted classifier with a ``predict`` method scoring
            candidate opening positions.
        clf_close: fitted classifier scoring candidate closing positions.
        file_text: full text of the filing.

    Returns:
        The extracted section (plus 200 trailing characters) with newlines
        escaped as ``\\n``, or '' when no plausible pair is found.
    """
    # Byte offsets of every occurrence of "item".
    items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    total_text_length = len(file_text)
    # Regex capture of "Item 2 ... Item 3"; feeds the regex_* features below.
    match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
    open_probabilities = []
    open_indices = []
    close_probabilities = []
    close_indices = []
    for index, item in enumerate(items):
        trailing_80 = file_text[item: item + 80].lower()
        trailing_management = 1 if 'management' in trailing_80 else 0
        trailing_analysis = 1 if 'analysis' in trailing_80 else 0
        trailing_quantitative = 1 if 'quantitative' in trailing_80 else 0
        # Cheap pre-filter: skip "item" mentions with none of the MD&A keywords.
        if sum([trailing_management, trailing_analysis, trailing_quantitative]) < 1:
            continue
        trailing_50 = file_text[item: item + 50].lower()  # NOTE(review): computed but never used
        trailing_100 = file_text[item: item + 100].lower()
        trailing_20 = file_text[item: item + 20].lower()
        trailing_period = 1 if '.' in trailing_20 else 0
        trailing_2 = 1 if '2' in trailing_20 else 0
        trailing_3 = 1 if '3' in trailing_20 else 0
        trailing_newline = 1 if '\n' in trailing_100 else 0
        leading_newline = 1 if '\n' in file_text[item - 5: item] else 0
        leading_spaces = 1 if '  ' in file_text[item - 5: item] else 0
        leading_tab = 1 if '\t' in file_text[item - 5: item] else 0
        regex_open = 0
        regex_close = 0
        try:
            # Does the regex-captured section start exactly at this offset?
            mda = match[-1][0]
            if file_text[item: item + len(mda)] == mda:
                regex_open = 1
        except IndexError:
            e = 1  # no regex match; placeholder used instead of `pass`
        try:
            # Does the regex-captured section end exactly at this offset?
            mda = match[-1][0]
            if file_text[item - len(mda): item] == mda:
                regex_close = 1
        except IndexError:
            e = 1
        # Feature superset; OPEN/CLOSE_INDEPENDENT_VARIABLES pick the slices
        # each classifier was trained on.
        data = {
            'position': item / total_text_length,
            'trailing_management': trailing_management,
            'trailing_period': trailing_period,
            'trailing_2': trailing_2,
            'trailing_newline': trailing_newline,
            'leading_newline': leading_newline,
            'total_size': total_text_length,
            'regex_open': regex_open,
            'trailing_analysis': trailing_analysis,
            'trailing_3': trailing_3,
            'leading_tab': leading_tab,
            'leading_spaces': leading_spaces,
            'regex_close': regex_close,
            'trailing_quantitative': trailing_quantitative
        }
        open_features = []
        for f in OPEN_INDEPENDENT_VARIABLES:
            open_features.append(data[f])
        close_features = []
        for f in CLOSE_INDEPENDENT_VARIABLES:
            close_features.append(data[f])
        # NOTE(review): predict() yields hard 0/1 labels, so index(max(...))
        # below picks the FIRST positively classified candidate, not the most
        # probable one -- predict_proba may have been intended; confirm.
        open_probability = clf_open.predict([open_features])
        close_probability = clf_close.predict([close_features])
        open_probabilities.append(open_probability)
        close_probabilities.append(close_probability)
        open_indices.append(item)
        close_indices.append(item)
    try:
        open_index = open_probabilities.index(max(open_probabilities))
        close_index = close_probabilities.index(max(close_probabilities))
        # NOTE(review): leftover debug prints -- noisy in batch runs.
        print('Open: ', open_index)
        print('Close: ', close_index)
    except ValueError:
        # Empty candidate lists: no "item" survived the keyword pre-filter.
        return ''
    if open_index > close_index:
        return ''
    else:
        # return file_text[items[open_index]: items[close_index]]
        return file_text[open_indices[open_index]: (close_indices[close_index] + 200)].replace('\n', '\\n')
def get_supervised_mda(new_file_name):
    """Interactively label MD&A extractions for the sample filings.

    For each filing in PATH not already recorded in ``new_file_name``, runs
    the classifier-based extractor, opens both the filing and the candidate
    section in the default editor (``os.startfile`` is Windows-only), and
    asks the reviewer to confirm.  Confirmed extractions are appended to the
    CSV; otherwise the (presumably hand-corrected -- confirm workflow) text
    of FileInQuestion.txt is stored instead.

    Args:
        new_file_name: path of the supervised-extraction CSV being built.
    """
    # All previously open(...) calls leaked their handles; each read/write now
    # uses a context manager so buffers are flushed before the editor opens.
    with open(new_file_name) as done_file:
        already_done = done_file.read()
    _, _, file_names = next(os.walk(PATH))
    for file_name in file_names:
        if file_name in already_done:
            print('file already done!')
            continue
        with open(f'{PATH}\\{file_name}') as filing:
            file_text = filing.read()
        mda_text = get_mda(OPEN_CLASSIFIER, CLOSE_CLASSIFIER, file_text)
        with open('IsThisTheSection.txt', 'w') as modified_file:
            modified_file.write(mda_text.replace('\\n', '\n'))
        with open('FileInQuestion.txt', 'w') as output_file:
            output_file.write(file_text.replace('\\n', '\n'))
        os.startfile('FileInQuestion.txt')
        os.startfile('IsThisTheSection.txt')
        is_correct = input('<ENTER> if the section correct.')
        with open(new_file_name, 'a', newline='') as out_csv:
            writer = csv.writer(out_csv)
            if is_correct == '':
                writer.writerow([file_name, mda_text])
            else:
                with open('FileInQuestion.txt', 'r') as edited:
                    edited_text = edited.read().replace('\n', '\\n')
                writer.writerow([file_name, edited_text])
def create_classification(file):
    """Build a training table of candidate MD&A boundary features.

    For every labelled filing in ``file`` (CSV of file name -> extracted
    MD&A text), each occurrence of the word "discussion" in the filing
    becomes one candidate row: the targets mark whether the 500 characters
    around the candidate closely match the labelled section's first/last 500
    characters (TF-IDF cosine > 0.9), and the remaining columns are binary
    keyword-context features.

    Args:
        file: path of the supervised-extraction CSV ('file', 'mda' columns;
            rows labelled '-9' are skipped).

    Returns:
        pandas.DataFrame with one row per candidate location.  (The original
        built this frame and silently discarded it; it is now returned.)
    """
    df = pd.read_csv(file, header=None, names=['file', 'mda'])
    df = df[df['mda'] != '-9']
    files = list(df['file'])
    mdas = list(df['mda'])
    data = pd.DataFrame(columns=['file', 'open_target', 'close_target', 'trailing_management', 'trailing_discussion',
                                 'trailing_item', 'trailing_2', 'trailing_3', 'trailing_4', 'trailing_controls',
                                 'trailing_procedures', 'trailing_quantitative', 'trailing_qualitative'])
    vectorizer = TfidfVectorizer()
    # Rows are collected in a list and concatenated once at the end:
    # DataFrame.append was deprecated and removed in pandas 2.0, and the
    # per-row append was quadratic anyway.
    rows = []
    for i, file_name in enumerate(files):
        with open(f'{PATH}\\{file_name}') as fh:
            file_text = fh.read()
        # Offsets of every "discussion" (variable name kept from the original).
        newlines = [m.start() for m in re.finditer('discussion', file_text, re.IGNORECASE)]
        mda_text = mdas[i].replace('\\n', '\n')
        mda_open = mda_text[:500]
        mda_close = mda_text[-500:]
        total_text_length = len(mda_text)
        for index in newlines:
            index_text_open = file_text[index:index + 500]
            index_text_close = file_text[index - 500:index]
            vectorizer.fit([mda_open, index_text_open])
            open_similarity = cosine_similarity(vectorizer.transform([mda_open]), vectorizer.transform([index_text_open]))[0][0]
            vectorizer.fit([mda_close, index_text_close])
            close_similarity = cosine_similarity(vectorizer.transform([mda_close]), vectorizer.transform([index_text_close]))[0][0]
            open_target = 1 if open_similarity > 0.9 else 0
            close_target = 1 if close_similarity > 0.9 else 0
            # NOTE(review): `index` is an offset into file_text but is sliced
            # against mda_text here (and used against len(mda_text) below) --
            # looks like a file_text/mda_text mix-up; behavior preserved.
            trailing_text = mda_text[index:index + 100].lower()
            rows.append({
                'file': file,  # NOTE(review): stores the CSV path, not file_name -- confirm intended
                'open_target': open_target,
                'close_target': close_target,
                'position': index / total_text_length,
                'text_size': total_text_length,
                'trailing_management': 1 if 'management' in trailing_text else 0,
                'trailing_discussion': 1 if 'discussion' in trailing_text else 0,
                'trailing_item': 1 if 'item' in trailing_text else 0,
                'trailing_2': 1 if '2' in trailing_text else 0,
                'trailing_3': 1 if '3' in trailing_text else 0,
                'trailing_4': 1 if '4' in trailing_text else 0,
                'trailing_controls': 1 if 'controls' in trailing_text else 0,
                'trailing_procedures': 1 if 'procedures' in trailing_text else 0,
                'trailing_quantitative': 1 if 'quantitative' in trailing_text else 0,
                'trailing_qualitative': 1 if 'qualitative' in trailing_text else 0,
            })
    if rows:
        data = pd.concat([data, pd.DataFrame(rows)], ignore_index=True)
    return data
if __name__ == "__main__":
get_supervised_mda('NewExtractionSupervised.csv')
# p = 'D:\\SEC Filing Data\\10-X_C_1993-2000\\1997\\QTR1\\19970102_10-Q_edgar_data_58636_0000058636-97-000001_1.txt'
# file_text = open(p, 'r').read()
# mda = get_mda(OPEN_CLASSIFIER, CLOSE_CLASSIFIER, file_text)
# modified_file_text = ''
# for index, s in enumerate(re.split(r'item', file_text, flags=re.IGNORECASE)):
# modified_file_text += f'{s} item $%$ {index} $%$'
#
# output_file = open('FileInQuestion.txt', 'w')
# output_file.write(modified_file_text)
# output_file.close()
#
# os.startfile('FileInQuestion.txt')
# create_classification('NewExtractionSupervised.csv')
<file_sep>import os
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import multiprocessing
import csv
pd.set_option('display.max_columns', 10)
pd.options.display.width = 0
# Processing window: quarters from STARTING_YEAR/STARTING_QUARTER through
# ENDING_YEAR/ENDING_QUARTER (commented alternatives cover the full sample).
STARTING_YEAR = 2006
# STARTING_YEAR = 1993
STARTING_QUARTER = 1
ENDING_YEAR = 2010
# ENDING_YEAR = 2018
ENDING_QUARTER = 4
# Directory of per-quarter extracted-MDA CSVs, and the similarity output file.
PATH_TO_EXTRACTED = '../../Old/Analysis/Extracted/Quarterly'
OUTPUT_DIRECTORY = 'test.csv'
def get_quarter_mda(desired_year, desired_quarter, path_to_extracted):
    """Load the extracted-MDA CSV for one year/quarter.

    Scans ``path_to_extracted`` for a file whose name contains both the year
    and the ``QTR<n>`` tag, and loads it with the canonical column names.
    Returns None when no matching file exists.
    """
    quarter_tag = f'QTR{desired_quarter}'
    year_tag = str(desired_year)
    _, _, candidates = next(os.walk(path_to_extracted))
    for candidate in candidates:
        if quarter_tag in candidate and year_tag in candidate:
            return pd.read_csv(f'{path_to_extracted}\\{candidate}', header=None,
                               names=['path', 'date', 'cik', 'mda'])
    return None
def compute_similarity(document1, document2):
    """Cosine similarity between two documents, with TF-IDF fitted on just this pair."""
    tfidf = TfidfVectorizer()
    tfidf.fit([document1, document2])
    first = tfidf.transform([document1])
    second = tfidf.transform([document2])
    return cosine_similarity(first, second)[0][0]
def mp_worker(args):
    """Score one (current, previous, cik, date) task; returns (similarity, cik, date)."""
    current_doc = args[0]
    previous_doc = args[1]
    cik = args[2]
    date = args[3]
    return compute_similarity(current_doc, previous_doc), cik, date
def mp_handler(current_mda, previous_mda, cik, date, n_pools, output_dir):
    """Compute current-vs-previous MD&A similarities in parallel, appending rows to a CSV.

    Args:
        current_mda: list of current-quarter MD&A texts.
        previous_mda: list of prior-quarter MD&A texts, aligned with the above.
        cik: list of filer CIKs, aligned.
        date: list of filing dates, aligned.
        n_pools: number of worker processes.
        output_dir: path of the CSV file rows are appended to.
    """
    p = multiprocessing.Pool(n_pools)
    args = list(zip(current_mda, previous_mda, cik, date))
    # The CSV handle was previously opened inline and never closed; the
    # context manager guarantees a flush, and the pool is now shut down too.
    with open(output_dir, 'a', newline='') as out_file:
        writer = csv.writer(out_file)
        counter = 0
        for result in p.imap(mp_worker, args):
            counter += 1
            print(f'\rPercentage Complete: {round((counter / len(args)) * 100, 2)}%', end="", flush=True)
            writer.writerow([result[0], result[1], result[2]])
    print('\n')
    p.close()
    p.join()
if __name__ == "__main__":
for year in range(STARTING_YEAR, ENDING_YEAR + 1):
for quarter in range(1, 4 + 1):
if (year == STARTING_YEAR) and ((quarter == 1) or (quarter == 2)):
continue
df_one = get_quarter_mda(year, [1, 2, 3, 4][quarter - 2], PATH_TO_EXTRACTED)[['cik', 'mda']]
df_one = df_one.set_index('cik')
df_two = get_quarter_mda(year, [1, 2, 3, 4][quarter - 3], PATH_TO_EXTRACTED)[['cik', 'mda']]
df_two = df_two.rename(columns={'mda': 'mda_two'})
df_two = df_two.set_index('cik')
df_current = get_quarter_mda(year, quarter, PATH_TO_EXTRACTED)
df_all = df_current.join(df_one, on='cik', how='left', lsuffix='_current', rsuffix='_one')
df_all = df_all.join(df_two, on='cik', how='left')
df_all['mda_previous'] = df_all['mda_one']
# df_all.loc[df_all['mda_previous'].isnull(), 'mda_previous'] = df_all['mda_two']
df_all['mda_previous'].fillna(df_all['mda_two'], inplace=True)
df_all = df_all[(~df_all['mda_current'].isna()) & (~df_all['mda_previous'].isna())]
current_mda = list(df_all['mda_current'])
previous_mda = list(df_all['mda_previous'])
dates = list(df_all['date'])
ciks = list(df_all['cik'])
print(f'Working on Quarter {quarter} of {year}')
mp_handler(current_mda, previous_mda, ciks, dates, 1, OUTPUT_DIRECTORY)
<file_sep>import os
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics import jaccard_score
import multiprocessing
import csv
import time
import numpy as np
# https://poseidon01.ssrn.com/delivery.php?ID=803090114088005098001115102064020103016052085015079029126119079027126070064113116030006052013006037046016118069065014088116005048011066061044005098018125101099122071079013098081117007004069103122123112093100065074127113008077064093107098018100126111&EXT=pdf&INDEX=TRUE
def compute_similarity(document1, document2):
    """Cosine similarity between the two documents' raw token-count vectors."""
    counts = CountVectorizer()
    counts.fit([document1, document2])
    first = counts.transform([document1])
    second = counts.transform([document2])
    return cosine_similarity(first, second)[0][0]
def compute_jaccard(document1, document2):
    """Jaccard score between the two documents' token-count vectors.

    NOTE(review): jaccard_score is fed raw counts rather than binarized
    presence vectors -- confirm this is the intended similarity measure.
    """
    counts = CountVectorizer()
    counts.fit([document1, document2])
    first = np.array(counts.transform([document1]).toarray()[0])
    second = counts.transform([document2]).toarray()[0]
    return jaccard_score(first, second)
def mp_worker(args):
    """Compute the Jaccard similarity between one filing and its predecessor.

    Args:
        args: (current_path, previous_path) pair of filing file paths.

    Returns:
        [date, cik, jaccard] -- date and cik parsed from the current file's
        name; assumes the layout D:\\...\\<year>\\<quarter>\\<date>_10-Q_
        edgar_data_<cik>_... (backslash-split index 5) -- confirm.
    """
    current_path = args[0]
    previous_path = args[1]
    filing_name = current_path.split('\\')[5]
    date = filing_name.split('_')[0]
    cik = filing_name.split('_')[4]
    # Close handles deterministically instead of relying on GC.
    with open(current_path) as f:
        current_text = f.read()
    with open(previous_path) as f:
        previous_text = f.read()
    return [date, cik, compute_jaccard(current_text, previous_text)]
def mp_handler(current_files, previous_files, n_pools, output_dir):
    """Score each (current, previous) filing pair in a process pool, appending rows to a CSV.

    Args:
        current_files: sized list of current-quarter filing paths.
        previous_files: prior filing paths, aligned with current_files.
        n_pools: number of worker processes.
        output_dir: path of the CSV file rows are appended to.
    """
    p = multiprocessing.Pool(n_pools)
    n_cik = len(current_files)
    work = zip(current_files, previous_files)
    # writer = csv.writer(open(output_dir, 'a', newline=''))
    with open(output_dir, 'a', newline='') as f:
        writer = csv.writer(f)
        counter = 0
        for result in p.imap(mp_worker, work):
            counter += 1
            if result is not None:
                writer.writerow(result)
            print(f'\rPercentage Complete: {round((counter / n_cik) * 100, 2)}%', end="", flush=True)
    print('\n')
    p.close()
    p.join()  # reap worker processes (previously only closed, never joined)
def get_previous_filing(cik, files_t_1, files_t_2):
    """Return the most recent prior filing path for ``cik``.

    Prefers the previous quarter's map (``files_t_1``), falls back to two
    quarters back (``files_t_2``), and returns None when the filer is absent
    from both.
    """
    for lookup in (files_t_1, files_t_2):
        if cik in lookup:
            return lookup[cik]
    return None
if __name__ == "__main__":
path_data_list = [
'D:\\SEC Filing Data\\10-X_C_1993-2000',
'D:\\SEC Filing Data\\10-X_C_2001-2005',
'D:\\SEC Filing Data\\10-X_C_2006-2010',
'D:\\SEC Filing Data\\10-X_C_2011-2015',
'D:\\SEC Filing Data\\10-X_C_2016-2018'
]
work = []
for path_data in path_data_list:
_, years, _ = next(os.walk(path_data))
for year in years:
_, quarters, _ = next(os.walk(f'{path_data}\\{year}'))
for quarter in quarters:
work.append(f'{path_data}\\{year}\\{quarter}')
files_t_1 = None
files_t_2 = None
for w in work:
quarter = w.split('\\')[4]
year = w.split('\\')[3]
# if int(year) < 2002:
# continue
#
# if int(year) == 2002 and quarter in ['QTR1', 'QTR2', 'QTR3']:
# continue
print(f'Working on {quarter} of {year}...')
output_directory = f'../EntireFile\\Jaccard\\CountVectorizer\\Quarterly\\10-Q_{year}_{quarter}.csv'
files_t = {}
file_names = []
_, _, directories = next(os.walk(w))
for directory in directories:
if '_10-Q_' in directory:
cik = directory.split('_')[4]
files_t[cik] = f'{w}\\{directory}'
file_names.append(directory)
if (files_t_1 is not None) and (files_t_2 is not None):
current_files = []
previous_files = []
for file_name in file_names:
cik = file_name.split('_')[4]
previous_file = get_previous_filing(cik, files_t_1, files_t_2)
if previous_file:
previous_files.append(previous_file)
current_files.append(f'{w}\\{file_name}')
a = mp_handler(current_files, previous_files, 1, output_directory)
files_t_2 = files_t_1
files_t_1 = files_t
else:
files_t_2 = files_t_1
files_t_1 = files_t
<file_sep>import pandas as pd
import os
import numpy as np
from PetriProgramming.PortfolioHelper import plot_cumulative_returns, summary_statistics
import matplotlib.pyplot as plt
pd.set_option('display.max_columns', 80)
pd.set_option('display.max_rows', 25)
pd.set_option('display.min_rows', 25)
pd.options.display.width = 150
def get_similarity_data(dir):
    """Load and stack all annual + quarterly similarity CSVs under ``dir``.

    Args:
        dir: root directory containing ``Annual/`` and ``Quarterly/``
            sub-directories of header-less (date_int, cik, similarity) CSVs.
            (Parameter name shadows the builtin; kept for caller compatibility.)

    Returns:
        pandas.DataFrame with columns date_int, cik, similarity, plus
        year/month parsed from the YYYYMMDD-style date_int.
    """
    _, _, annual_directories = next(os.walk(f'{dir}/Annual'))
    _, _, quarterly_directories = next(os.walk(f'{dir}/Quarterly'))
    # Collect frames and concatenate once: DataFrame.append was removed in
    # pandas 2.0 and the per-file append was quadratic.
    frames = []
    for d in annual_directories:
        frames.append(pd.read_csv(f'{dir}/Annual/{d}', header=None,
                                  names=['date_int', 'cik', 'similarity']))
    for d in quarterly_directories:
        frames.append(pd.read_csv(f'{dir}/Quarterly/{d}', header=None,
                                  names=['date_int', 'cik', 'similarity']))
    if frames:
        df_similarity = pd.concat(frames)
    else:
        # Previously an empty input crashed on the missing column below.
        df_similarity = pd.DataFrame(columns=['date_int', 'cik', 'similarity'])
    df_similarity['year'] = df_similarity['date_int'].apply(lambda x: int(str(x)[:4]))
    df_similarity['month'] = df_similarity['date_int'].apply(lambda x: int(str(x)[4:6]))
    return df_similarity
def get_return_data():
    """Load CRSP monthly returns and left-merge CIK identifiers via CUSIP.

    Reads the two hard-coded CSVs, normalises prices (drops the sign) and
    returns (non-numeric codes become NaN), then joins the CIK link table on
    (year, month, 8-character CUSIP prefix).  Unlinked rows keep NaN CIKs.
    """
    crsp = pd.read_csv('../../Data/Returns-CRSP.csv')
    crsp['date'] = pd.to_datetime(crsp['date'], format='%Y%m%d')
    crsp['PRC'] = crsp['PRC'].abs()
    crsp['RET'] = pd.to_numeric(crsp['RET'], errors='coerce')
    crsp['year'] = crsp['date'].dt.year
    crsp['month'] = crsp['date'].dt.month
    link = pd.read_csv('../../Data/CIK_Link.csv')
    link['merge_CUSIP'] = link['cusip'].apply(lambda c: str(c)[:8])
    link['datadate'] = pd.to_datetime(link['datadate'], format='%Y%m%d')
    link['month'] = link['datadate'].dt.month
    link['year'] = link['datadate'].dt.year
    merged = pd.merge(crsp, link, how='left', left_on=['year', 'month', 'CUSIP'],
                      right_on=['year', 'month', 'merge_CUSIP'])
    # print(merged)
    return merged
def get_data():
    """Build and cache (dataset_v2.csv) the merged returns + similarity panel through 2018."""
    panel = pd.merge(
        get_return_data(),
        get_similarity_data('../../Cosine Similarity Analysis/EntireFilePreviousYear/Cosine/CountVectorizerBinary'),
        how='left', on=['cik', 'year', 'month'])
    panel = panel[panel['year'] < 2019]
    panel.to_csv('dataset_v2.csv')
    return panel
def portfolio(df, n_portfolios):
    """Form value-weighted similarity-sorted portfolios and attach FF3F factors.

    Args:
        df: merged returns/similarity panel (needs PRC, SHROUT, similarity,
            RET, PERMNO, date, year, month columns).
        n_portfolios: number of similarity quantile groups per month.

    Returns:
        pandas.DataFrame of monthly portfolio returns (one column per group)
        merged with the Fama-French three factors, dates as month-end stamps.

    Side effects: shows several diagnostic matplotlib plots.
    """
    # print(df)
    # Market cap = |price| * shares outstanding.
    df['CAP'] = abs(df['PRC']) * df['SHROUT']
    desired_columns = ['similarity', 'month', 'year', 'PERMNO', 'date', 'RET', 'CAP']
    df = df[desired_columns]
    df = df[df['year'] > 1994]
    # Only stocks with a similarity score can be sorted into groups.
    df_dropped = df.dropna(subset=['similarity'])
    # Monthly quantile sort into n_portfolios similarity groups (0..n-1).
    df_dropped['group'] = df_dropped.groupby(['year', 'month'])['similarity'].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights within each (year, month, group) bucket.
    df_dropped_group_capitalization = df_dropped.groupby(['year', 'month', 'group'])['CAP'].sum().reset_index()
    df_dropped_group_capitalization.columns = ['year', 'month', 'group', 'group_cap']
    df_dropped = pd.merge(df_dropped, df_dropped_group_capitalization, on=['year', 'month', 'group'])
    df_dropped['weight'] = df_dropped['CAP'] / df_dropped['group_cap']
    df_dropped = df_dropped[['date', 'PERMNO', 'weight', 'group']]
    df = pd.merge(df, df_dropped, on=['date', 'PERMNO'], how='left')
    df['date'] = pd.to_datetime(df['date'])
    # Carry each stock's last weight forward at most 2 rows.
    # NOTE(review): ffill is applied over the whole column, not per PERMNO --
    # confirm weights cannot leak across stocks at group boundaries.
    df['weight'] = df['weight'].ffill(limit=2)
    df = df[~df['weight'].isna()]
    df = df[~df['group'].isna()]
    df.groupby('date')['PERMNO'].count().plot()
    plt.show()
    # Validate sorting methodology
    a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    for i in range(n_portfolios):
        plt.plot(a[i], label=f'Portfolio {i}')
    plt.show()
    a = df.groupby(['group', 'date'])['weight'].sum().unstack().T
    for i in range(n_portfolios):
        plt.plot(a[i], label=f'Portfolio {i}')
    plt.show()
    # Value-weighted monthly return per group.
    df['weighted_return'] = df['weight'] * df['RET']
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # Fama-French factors come in percent; convert to decimals.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # YYYYMM -> month-end timestamp so it aligns with CRSP dates.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
    # print(df)
    # print(df.groupby(['year', 'month', 'PERMNO']).mean())
    # print(df.groupby(['year', 'month']).count())
def portfolio_testing():
    """Sanity-check the weighted-cumulative portfolio return computation on a toy panel.

    Builds a hand-made panel of 4 stocks in 2 groups over 2 rebalancing
    periods, compounds each stock within (group, stock, period), aggregates
    by weight, and prints the resulting per-period portfolio returns.
    """
    df = pd.DataFrame({
        "date": [1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 6, 7, 8, 9, 10] + [1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 6, 7, 8, 9, 10],
        "stock": ['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B', 'A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B'] + ['C', 'C', 'C', 'C', 'C', 'D', 'D', 'D', 'D', 'D', 'C', 'C', 'C', 'C', 'C', 'D', 'D', 'D', 'D', 'D'],
        "weight": [.4, .4, .4, .4, .4, .6, .6, .6, .6, .6, .3, .3, .3, .3, .3, .7, .7, .7, .7, .7] + [.2, .2, .2, .2, .2, .8, .8, .8, .8, .8, .3, .3, .3, .3, .3, .7, .7, .7, .7, .7],
        "return": [0.1, 0.2, 0.1, 0.2, -0.1, -0.05, 0.05, 0.2, -0.05, 0.05, 0.05, -0.05, 0.05, 0.05, 0.2, -0.05, 0.05, -0.05, 0.05, 0.2] + [0.1, 0.2, 0.1, 0.2, -0.1, -0.05, 0.05, 0.2, -0.05, 0.05, 0.05, -0.05, 0.05, 0.05, 0.2, -0.05, 0.05, -0.05, 0.05, 0.2],
        "period": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2] + [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
        "group": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
    })
    # Per-stock cumulative growth factor within each holding period.
    df['cumulative'] = df.groupby(['group', 'stock', 'period']).apply(lambda x: np.cumprod(1 + x))['return']
    df['weighted_cumulative'] = df['cumulative'] * df['weight']
    # Portfolio value per (group, period, date), then back to period returns.
    df_portfolio = df.groupby(['group', 'period', 'date'])['weighted_cumulative'].sum().reset_index()
    df_portfolio['port_returns'] = df_portfolio.groupby(['group', 'period'])['weighted_cumulative'].pct_change()
    # First observation of each (group, period) has no pct_change; its return
    # is measured against a starting portfolio value of 1.
    df_portfolio['port_returns'] = df_portfolio.apply(lambda x: x['weighted_cumulative'] / 1 - 1 if np.isnan(x['port_returns']) else x['port_returns'], axis=1)
    print(df_portfolio)
if __name__ == '__main__':
    # Entry point currently runs only the toy sanity check; the commented
    # lines are the full pipeline (build dataset, sort portfolios, report).
    # get_similarity_data('../../Cosine Similarity Analysis/EntireFilePreviousYear/Cosine/CountVectorizerBinary')
    # get_return_data()
    # get_data()
    # portfolio(get_data())
    # n_portfolios = 5
    # p = portfolio(pd.read_csv('dataset_v2.csv', index_col=0), n_portfolios)
    #
    # plot_cumulative_returns(p, n_portfolios)
    # summary_statistics(p, n_portfolios)
    portfolio_testing()
<file_sep>import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_score
import torch
import transformers as ppb
import warnings
import os
from sklearn.metrics import plot_confusion_matrix
import matplotlib.pyplot as plt
import re
import math
warnings.filterwarnings('ignore')
PATH = '../../Data/10-Q Sample'
def bert_test():
    """Sanity-check DistilBERT sentence embeddings on the SST2 sentiment set.

    Downloads the SST2 training TSV, embeds the first 2000 sentences with a
    pretrained DistilBERT, fits a logistic regression on the [CLS] vectors,
    and prints the held-out accuracy.
    """
    sst2 = pd.read_csv('https://github.com/clairett/pytorch-sentiment-classification/raw/master/data/SST2/train.tsv', delimiter='\t', header=None)
    sample = sst2[:2000]
    weights_name = 'distilbert-base-uncased'
    tokenizer = ppb.DistilBertTokenizer.from_pretrained(weights_name)
    model = ppb.DistilBertModel.from_pretrained(weights_name)
    tokenized = sample[0].apply(lambda sentence: tokenizer.encode(sentence, add_special_tokens=True))
    # Right-pad every token sequence to the longest one with id 0.
    longest = max(len(ids) for ids in tokenized.values)
    padded = np.array([ids + [0] * (longest - len(ids)) for ids in tokenized.values])
    # Attention mask: 1 on real tokens, 0 on padding.
    mask = np.where(padded != 0, 1, 0)
    input_ids = torch.tensor(padded)
    mask = torch.tensor(mask)
    with torch.no_grad():
        hidden = model(input_ids, attention_mask=mask)
    # The [CLS] position (index 0) serves as the sentence embedding.
    cls_vectors = hidden[0][:, 0, :].numpy()
    targets = sample[1]
    X_train, X_test, y_train, y_test = train_test_split(cls_vectors, targets)
    classifier = LogisticRegression()
    classifier.fit(X_train, y_train)
    print(classifier.score(X_test, y_test))
def modify_formatting(text):
    """Escape literal newlines and tabs so the text fits on a single display/CSV line."""
    escaped = text.replace('\n', '\\n')
    return escaped.replace('\t', '\\t')
def get_bert():
    """Train a boundary classifier on DistilBERT embeddings of candidate MD&A openings.

    For every supervised filing, samples text windows at fixed offsets, at
    each "management" occurrence, and at the labelled opening index; embeds
    each window with DistilBERT; then grid-searches a random forest to
    separate true openings (label 1) from the rest and plots train/test
    confusion matrices.
    """
    df = pd.read_csv('../NewExtractionSupervised.csv', header=None, names=['file', 'mda'])
    file_names = list(df['file'])
    # corpus = [modify_formatting(open(f'{PATH}/{file}').read()) for file in file_names]
    model_class, tokenizer_class, pretrained_weights = (ppb.DistilBertModel, ppb.DistilBertTokenizer, 'distilbert-base-uncased')
    tokenizer = tokenizer_class.from_pretrained(pretrained_weights)
    model = model_class.from_pretrained(pretrained_weights)
    mda = list(df['mda'])
    X_text = []
    y = []
    # indices_history = []
    # file_history = []
    for m, file_name in enumerate(file_names):
        text = mda[m].replace('\\n', '\n')
        # '-9' marks filings with no usable labelled section.
        if text == '-9':
            continue
        # NOTE(review): file handle is never closed.
        file_text = open(f'{PATH}/{file_name}').read()
        # Offset of the labelled section start; raises ValueError if the
        # labelled text is not found verbatim -- confirm that cannot happen.
        open_index = file_text.index(text)
        management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
        # item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        # Negative samples at fixed offsets + every "management"; the one
        # positive sample is the labelled open_index.
        interested_locations = [100, 200, 500, 700, 900, 1100, 1500, open_index]
        interested_locations += management_indices
        # interested_locations += item_indices
        for p in interested_locations:
            # Drop the (likely partial) first and last words of the window,
            # then keep the 20 words around the middle.
            surrounding_text = file_text[p - 400: p + 400].split(' ')[1: -1]
            middle = math.trunc(len(surrounding_text) / 2)
            # a = len(surrounding_text[middle - 250: middle + 250])
            surrounding_text = ' '.join(surrounding_text[middle - 10: middle + 10])
            X_text.append(surrounding_text)
            y.append(1 if p == open_index else 0)
    tokenized = [tokenizer.encode(x, add_special_tokens=True) for x in X_text]
    # Right-pad all token sequences to the longest with id 0.
    max_len = 0
    for i in tokenized:
        if len(i) > max_len:
            max_len = len(i)
    padded = np.array([i + [0] * (max_len - len(i)) for i in tokenized])
    # padded = padded[:1000]
    # y = y[:1000]
    attention_mask = np.where(padded != 0, 1, 0)
    input_ids = torch.tensor(padded)
    attention_mask = torch.tensor(attention_mask)
    with torch.no_grad():
        last_hidden_states = model(input_ids, attention_mask=attention_mask)
    # [CLS] vector (position 0) as the window embedding.
    features = last_hidden_states[0][:, 0, :].numpy()
    labels = y
    train_features, test_features, train_labels, test_labels = train_test_split(features, labels)
    # 'balanced' class weights compensate for the 1-positive-per-filing skew.
    parameters = {'max_depth': [6, 5, 7, 10],
                  'class_weight': ['balanced']}
    rf = RandomForestClassifier()
    lr_clf = GridSearchCV(rf, parameters, scoring='recall', n_jobs=4)
    # lr_clf = LogisticRegression()
    lr_clf.fit(train_features, train_labels)
    # print(lr_clf.score(test_features, test_labels))
    plot_confusion_matrix(lr_clf, train_features, train_labels, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(lr_clf, test_features, test_labels, normalize='true', cmap='Blues')
    plt.show()
if __name__ == "__main__":
# print('a')
# bert_test()
# get_file_corpus()
get_bert()<file_sep>import re
import os
import time
import pickle
import multiprocessing
import pathlib
# import sklearn
# from sklearn.ensemble import RandomForestClassifier
# from sklearn.ensemble.forest import RandomForestClassifier
# Parent directory of this script (Windows-style path handling).
PATH = '\\'.join(str(pathlib.Path().absolute()).split('\\')[:-1])
DATA_PATH= ''
# Fallback pattern: everything between "Item 7." and "Item 8." (the MD&A in a 10-K).
REGEX_10K = r"(Item[\s]+?7\.[\s\S]*?)(Item[\s]+?8\.)"
# Feature names, in the exact column order the pickled classifiers were trained with.
OPEN_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline',
                              'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial',
                              'trailing_7A', 'regex_open','leading_see', 'leading_text', 'leading_double_newline',
                              'is_uppercase', 'trailing_omission', 'leading_table_of_contents', 'leading_newline_count',
                              'trailing_analysis']
CLOSE_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_8',
                               'trailing_newline', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces',
                               'trailing_financial', 'trailing_7A', 'regex_close', 'leading_see', 'leading_text',
                               'leading_double_newline', 'is_uppercase', 'trailing_omission',
                               'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']
# Pre-trained random forests for the 10-K opening/closing boundary.
# NOTE(review): loaded at import time; the pickle file handles are never closed.
OPEN_CLASSIFIER = pickle.load(open('../opening_random_forest.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../closing_random_forest.pkl', 'rb'))
def get_mda(clf_open, clf_close, file_text):
    """Locate the MD&A section (Item 7) of a 10-K filing.

    Every case-insensitive occurrence of the word "item" is turned into a
    feature vector and scored by two pre-trained classifiers -- one for the
    section's opening boundary, one for its close.  The text between the
    best-scoring open/close pair is returned with newlines escaped as
    ``\\n`` and commas replaced by the sentinel ``$%$`` (so the result fits
    one CSV field), or '' when no plausible pair exists.
    """
    items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    total_text_length = len(file_text)
    # Regex capture of "Item 7 ... Item 8"; feeds the regex_* features below.
    match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
    open_probabilities = []
    close_probabilities = []
    for index, item in enumerate(items):
        # --- binary context features around this "item" occurrence ---
        trailing_management = 1 if 'management' in file_text[item: item + 50].lower() else 0
        trailing_analysis = 1 if 'analysis' in file_text[item: item + 80].lower() else 0
        trailing_period = 1 if '.' in file_text[item: item + 20] else 0
        trailing_7 = 1 if '7' in file_text[item: item + 20] else 0
        trailing_8 = 1 if '8' in file_text[item: item + 20] else 0
        trailing_newline = 1 if '\n' in file_text[item: item + 100] else 0
        trailing_7A = 1 if '7a' in file_text[item: item + 15].lower() else 0
        is_uppercase = 1 if file_text[item: item + 15].isupper() else 0
        leading_window = file_text[item - 5: item]
        leading_newline = 1 if '\n' in leading_window else 0
        leading_double_newline = 1 if '\n\n' in leading_window else 0
        leading_tab = 1 if '\t' in leading_window else 0
        leading_spaces = 1 if '  ' in leading_window else 0
        # Average word length > 2 over the 40 preceding characters, i.e. the
        # occurrence sits inside running prose rather than a heading.
        preceding_words = file_text[item - 40: item].split(' ')
        leading_words = 1 if len(preceding_words) > 2 and (
            sum(len(w) for w in preceding_words) / len(preceding_words)) > 2 else 0
        leading_see = 1 if 'see' in file_text[item - 10: item].lower() else 0
        leading_with = 1 if 'with' in file_text[item - 10: item].lower() else 0
        leading_text = 1 if len(re.findall(r'\w+', leading_window)) > 0 else 0
        trailing_financial = 1 if 'financial' in file_text[item: item + 30].lower() else 0
        trailing_300 = file_text[item: item + 300].lower()
        trailing_omission = 1 if ('applicable' in trailing_300) or ('omitted' in trailing_300) else 0
        leading_table_of_contents = 1 if 'table of contents' in file_text[item - 50: item].lower() else 0
        leading_newline_count = len(file_text[item - 20: item].split('\n'))
        # Does the regex-captured MD&A start / end exactly at this offset?
        regex_open = 0
        regex_close = 0
        if match:
            mda = match[-1][0]
            if file_text[item: item + len(mda)] == mda:
                regex_open = 1
            if file_text[item - len(mda): item] == mda:
                regex_close = 1
        # Feature superset; OPEN/CLOSE_INDEPENDENT_VARIABLES select and order
        # the columns each classifier was trained with.
        data = {
            'position': item / total_text_length,
            'trailing_management': trailing_management,
            'trailing_period': trailing_period,
            'trailing_7': trailing_7,
            'trailing_newline': trailing_newline,
            'leading_newline': leading_newline,
            'total_size': total_text_length,
            'leading_tab': leading_tab,
            'leading_spaces': leading_spaces,
            'regex_open': regex_open,
            'regex_close': regex_close,
            'trailing_financial': trailing_financial,
            'input_location': index,
            'trailing_7A': trailing_7A,
            'leading_words': leading_words,
            'leading_see': leading_see,
            'leading_text': leading_text,
            'leading_double_newline': leading_double_newline,
            'is_uppercase': is_uppercase,
            'trailing_omission': trailing_omission,
            'leading_with': leading_with,
            'leading_table_of_contents': leading_table_of_contents,
            'leading_newline_count': leading_newline_count,
            'trailing_analysis': trailing_analysis,
            'trailing_8': trailing_8
        }
        open_features = [data[f] for f in OPEN_INDEPENDENT_VARIABLES]
        close_features = [data[f] for f in CLOSE_INDEPENDENT_VARIABLES]
        open_probabilities.append(clf_open.predict([open_features]))
        close_probabilities.append(clf_close.predict([close_features]))
    try:
        open_index = open_probabilities.index(max(open_probabilities))
        close_index = close_probabilities.index(max(close_probabilities))
    except ValueError:
        # No "item" occurrences at all -> nothing to extract.
        return ''
    if open_index > close_index:
        return ''
    return file_text[items[open_index]: items[close_index]].replace('\n', '\\n').replace(',', '$%$')
def mp_worker(args):
    """Extract the MD&A section from a single 10-K filing.

    Args:
        args: path of the filing text file (coerced to ``str``).

    Returns:
        Tuple of (file_name, filing date YYYYMMDD, CIK, extracted MD&A text).
    """
    file_name = str(args)
    # Classifiers live at module level so each pool worker loads them once.
    clf_open = OPEN_CLASSIFIER
    clf_close = CLOSE_CLASSIFIER
    # Context manager releases the file handle (the original leaked it).
    with open(file_name) as fh:
        file_text = fh.read()
    mda = get_mda(clf_open, clf_close, file_text)
    parts = file_name.split('_')
    cik = parts[6]          # CIK is the 7th underscore-delimited token of the path
    date = parts[2][-8:]    # trailing YYYYMMDD of the 3rd token
    return file_name, date, cik, mda
def mp_handler(file_names, n_pools, output_dir):
    """Extract MD&A sections from *file_names* in parallel and write a CSV.

    Args:
        file_names: iterable of filing paths handed to ``mp_worker``.
        n_pools: number of worker processes.
        output_dir: path of the CSV file to write (one row per filing).
    """
    start = time.time()
    # Context managers guarantee both the pool and the file are released even
    # if a worker raises. The original never closed the pool and called
    # f.close() redundantly inside the with-block.
    with multiprocessing.Pool(n_pools) as p, open(output_dir, 'w') as f:
        for result in p.imap(mp_worker, file_names):
            f.write(f'{result[0]},{result[1]},{result[2]},{result[3]}\n')
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
if __name__ == '__main__':
    # Root of the raw SEC 10-X archive, organised as <year>/<QTR*>/<filing>.
    path_data = 'D:\\SEC Filing Data\\10-X_C_2006-2010'
    _, years, _ = next(os.walk(path_data))
    for year in years:
        _, quarters, _ = next(os.walk(f'{path_data}\\{year}'))
        if year == '2006':
            # Skip 2006 (presumably processed in an earlier run -- confirm).
            continue
        for quarter in quarters:
            if quarter != 'QTR1':
                continue  # only QTR1 is processed in this pass
            print(f'Working on {quarter} of {year}...')
            output_directory = f'Extracted\\10-K_{year}_{quarter}.csv'
            _, _, directories = next(os.walk(f'{path_data}\\{year}\\{quarter}'))
            # Keep only 10-K filings for this quarter.
            all_directories = [f'{path_data}\\{year}\\{quarter}\\' + d
                               for d in directories if '_10-K_' in d]
            mp_handler(all_directories, 4, output_directory)
<file_sep>import os
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import multiprocessing
import csv
import time
# First and last (year, quarter) of extracted MD&A files to process.
STARTING_YEAR = 1994
# STARTING_YEAR = 1993
STARTING_QUARTER = 3
ENDING_YEAR = 2018
ENDING_QUARTER = 4
# Directory holding the per-quarter extracted MD&A CSVs.
PATH_TO_EXTRACTED = '../../Text Extraction/Extracted/Quarterly'
# Destination CSV for the computed similarity scores.
OUTPUT_DIRECTORY = 'similarity.csv'
# Widen pandas console output for interactive inspection.
pd.set_option('display.max_columns', 10)
def get_quarter_mda(desired_year, desired_quarter, path_to_extracted):
    """Load the extracted MD&A CSV for one calendar quarter.

    Args:
        desired_year: four-digit year looked for in the file name.
        desired_quarter: quarter number (1-4), matched as 'QTR<n>'.
        path_to_extracted: directory containing the per-quarter CSVs.

    Returns:
        DataFrame with columns ['path', 'date', 'cik', 'mda'], or None when
        no file for that quarter exists.
    """
    _, _, file_names = next(os.walk(path_to_extracted))
    for file_name in file_names:
        if (f'QTR{desired_quarter}' in file_name) and (str(desired_year) in file_name):
            # os.path.join instead of a hard-coded '\\' separator keeps the
            # lookup portable across operating systems.
            return pd.read_csv(os.path.join(path_to_extracted, file_name),
                               header=None, names=['path', 'date', 'cik', 'mda'])
    return None  # explicit: no matching quarter on disk
def compute_similarity(document1, document2):
    """Cosine similarity of two documents under a shared TF-IDF vocabulary.

    The vectorizer is fitted on exactly the two documents being compared,
    so the score reflects their relative term distributions.
    """
    tfidf = TfidfVectorizer().fit([document1, document2])
    vec_a = tfidf.transform([document1])
    vec_b = tfidf.transform([document2])
    return cosine_similarity(vec_a, vec_b)[0][0]
def mp_worker(args):
    """Pool worker: compute the similarity frame for one (year, quarter).

    Args:
        args: (year, quarter) tuple.

    Returns:
        Similarity DataFrame with 'end_of_month' stringified, or None when
        the quarter's data is unavailable.
    """
    year, quarter = args
    result = get_similarity_df(quarter, year)
    if result is None:
        # get_similarity_df returns None for missing quarters; the original
        # crashed here with a TypeError even though mp_handler explicitly
        # skips None results.
        return None
    result['end_of_month'] = result['end_of_month'].astype(str)
    return result
def mp_handler(work_list, n_pools, output_dir):
    """Run mp_worker over *work_list* in parallel, appending rows to a CSV.

    Args:
        work_list: list of (year, quarter) tuples.
        n_pools: number of worker processes.
        output_dir: CSV path opened in append mode.
    """
    counter = 0
    # Context managers close both the pool and the output file; the original
    # leaked the file handle passed anonymously to csv.writer.
    with multiprocessing.Pool(n_pools) as p, open(output_dir, 'a', newline='') as fh:
        writer = csv.writer(fh)
        for result in p.imap(mp_worker, work_list):
            counter += 1
            if result is not None:
                writer.writerows(result.values.tolist())
            # print(result.values.tolist()[:5])
            print(f'\rPercentage Complete: {round((counter / len(work_list)) * 100, 2)}%', end="", flush=True)
    print('\n')
def get_previous_filing(text_t_1, text_t_2):
    """Pick the most recent available prior MD&A text.

    Prefers the one-quarter-old text, falls back to the two-quarter-old
    text, and returns None when neither is available.
    """
    if not pd.isna(text_t_1):
        return text_t_1
    if not pd.isna(text_t_2):
        return text_t_2
    return None
def get_similarity_df(current_quarter, current_year):
    """Similarity of each firm's MD&A versus its previous filing.

    For quarter t, each CIK's MD&A is compared against its filing from
    t-1, falling back to t-2 (annual filers skip quarters).

    Args:
        current_quarter: quarter number (1-4).
        current_year: four-digit year.

    Returns:
        DataFrame ['cik', 'end_of_month', 'similarity'], or None when the
        required quarters are not on disk.
    """
    quarter_t = current_quarter
    year = current_year
    # Lagged quarters wrap around the year boundary.
    quarter_t_1 = range(4)[current_quarter - 1 - 1] + 1
    year_t_1 = year - 1 if current_quarter == 1 else year
    quarter_t_2 = range(4)[current_quarter - 2 - 1] + 1
    year_t_2 = year - 1 if current_quarter < 2 else year
    # if year == STARTING_YEAR and quarter_t == 1:
    #     return None
    if year == STARTING_YEAR and quarter_t == 2:
        # Earliest comparable quarter: only a one-quarter lag exists.
        df_t = get_quarter_mda(year, quarter_t, PATH_TO_EXTRACTED)
        df_t_1 = get_quarter_mda(year_t_1, quarter_t_1, PATH_TO_EXTRACTED)
        # Bug fix: `not (df_t or df_t_1)` raised ValueError on DataFrames
        # (ambiguous truth value); compare against None explicitly.
        if df_t is None or df_t_1 is None:
            return None
        # TODO: Drop if length of MDA is super small.... this would indicate an error
        df_t = df_t[~df_t['mda'].isna()]
        df_t = df_t[df_t['mda'] != '']
        df_t = df_t[['date', 'cik', 'mda']]
        df_t_1 = df_t_1[~df_t_1['mda'].isna()]
        df_t_1 = df_t_1[df_t_1['mda'] != '']
        df_t_1 = df_t_1[['cik', 'mda']]
        df_all = pd.merge(df_t, df_t_1, how='left', on='cik', suffixes=['_t', '_t_1'])
        df_all = df_all[~df_all['mda_t'].isna()]
        df_all = df_all[~df_all['mda_t_1'].isna()]
        df_all['similarity'] = df_all.apply(lambda x: compute_similarity(x['mda_t'], x['mda_t_1']), axis=1)
        # Bug fix: 'date' is a raw YYYYMMDD value, so it must be parsed
        # before the period conversion (mirrors the general branch below);
        # Series.to_period on the raw column raised.
        df_all['end_of_month'] = pd.to_datetime(df_all['date'], format='%Y%m%d').dt.to_period('M').dt.to_timestamp('M')
        return df_all[['cik', 'end_of_month', 'similarity']]
    else:
        df_t = get_quarter_mda(year, quarter_t, PATH_TO_EXTRACTED)
        df_t_1 = get_quarter_mda(year_t_1, quarter_t_1, PATH_TO_EXTRACTED)
        df_t_2 = get_quarter_mda(year_t_2, quarter_t_2, PATH_TO_EXTRACTED)
        # Guard every lookup: get_quarter_mda returns None for missing quarters.
        if df_t is None or df_t_1 is None or df_t_2 is None:
            return None
        df_t = df_t[~df_t['mda'].isna()]
        df_t = df_t[df_t['mda'] != '']
        df_t = df_t[['date', 'cik', 'mda']]
        df_t_1 = df_t_1[~df_t_1['mda'].isna()]
        df_t_1 = df_t_1[df_t_1['mda'] != '']
        df_t_1 = df_t_1[['cik', 'mda']]
        df_t_2 = df_t_2[~df_t_2['mda'].isna()]
        df_t_2 = df_t_2[df_t_2['mda'] != '']
        df_t_2 = df_t_2[['cik', 'mda']]
        df_all = pd.merge(df_t, df_t_1, how='left', on='cik', suffixes=['_t', '_t_1'])
        # Get rid of any columns that have no current MD&A text
        df_all = df_all[~df_all['mda_t'].isna()]
        df_all = pd.merge(df_all, df_t_2, how='left', on='cik')
        df_all = df_all.rename(columns={'mda': 'mda_t_2'})
        df_all['previous_mda'] = df_all.apply(lambda x: get_previous_filing(x['mda_t_1'], x['mda_t_2']), axis=1)
        df_all = df_all[~df_all['previous_mda'].isna()]
        df_all['similarity'] = df_all.apply(lambda x: compute_similarity(x['mda_t'], x['previous_mda']), axis=1)
        df_all['end_of_month'] = pd.to_datetime(df_all['date'], format='%Y%m%d').dt.to_period('M').dt.to_timestamp('M')
        return df_all[['cik', 'end_of_month', 'similarity']]
def main():
    """Build the (year, quarter) work list and run the similarity pipeline."""
    list_of_work = []
    for year in range(STARTING_YEAR, ENDING_YEAR + 1):
        for quarter in range(1, 5):
            # Skip quarters before the first extracted one.
            if year == STARTING_YEAR and quarter < STARTING_QUARTER:
                continue
            # Stop at the final configured quarter.
            if year == ENDING_YEAR and quarter == ENDING_QUARTER:
                break
            list_of_work.append((year, quarter))
    start = time.time()
    list_of_work.append((2018, 4))
    print(list_of_work)
    mp_handler(list_of_work, 6, OUTPUT_DIRECTORY)
    print(f'Multiple Threads: {round(time.time() - start, 2)} seconds')
# print(get_similarity_df(3, 1994))<file_sep>import pickle
import pandas as pd
import re
import numpy as np
from multiprocessing import Pool
from multiprocessing import cpu_count
import time
# Feature names, in the exact column order expected by the pickled
# opening-boundary classifier.
OPEN_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline',
                              'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial',
                              'trailing_7A', 'regex_open','leading_see', 'leading_text', 'leading_double_newline',
                              'is_uppercase', 'trailing_omission', 'leading_table_of_contents', 'leading_newline_count',
                              'trailing_analysis']
# Feature names for the closing-boundary classifier.
CLOSE_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_8',
                               'trailing_newline', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces',
                               'trailing_financial', 'trailing_7A', 'regex_close', 'leading_see', 'leading_text',
                               'leading_double_newline', 'is_uppercase', 'trailing_omission',
                               'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']
# Base path for input filings (empty here -- TODO confirm intended value).
PATH = ''
# Heuristic span from "Item 7." through "Item 8." of a 10-K; used below as a
# regex-agreement feature, not as the extractor itself.
REGEX_10K = r"(Item[\s]+?7\.[\s\S]*?)(Item[\s]+?8\.)"
def get_mda(clf_open, clf_close, file_text):
    """Locate the MD&A section of a 10-K using two trained classifiers.

    Every occurrence of the word 'item' is a candidate boundary; a feature
    vector is scored for each candidate by the opening and closing
    classifiers, and the text between the best-scoring pair is returned.

    Args:
        clf_open: classifier with a ``predict`` method scoring opening candidates.
        clf_close: classifier scoring closing candidates.
        file_text: full text of the filing.

    Returns:
        The extracted MD&A text, or '' when no plausible section is found.
    """
    items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    total_text_length = len(file_text)
    match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
    open_probabilities = []
    close_probabilities = []
    for index, item in enumerate(items):
        # Binary features describing the text surrounding this candidate.
        trailing_management = int('management' in file_text[item: item + 50].lower())
        trailing_analysis = int('analysis' in file_text[item: item + 80].lower())
        trailing_period = int('.' in file_text[item: item + 20])
        trailing_7 = int('7' in file_text[item: item + 20])
        trailing_8 = int('8' in file_text[item: item + 20])
        trailing_newline = int('\n' in file_text[item: item + 100])
        trailing_7A = int('7a' in file_text[item: item + 15].lower())
        is_uppercase = int(file_text[item: item + 15].isupper())
        leading_newline = int('\n' in file_text[item - 5: item])
        leading_double_newline = int('\n\n' in file_text[item - 5: item])
        leading_tab = int('\t' in file_text[item - 5: item])
        leading_spaces = int(' ' in file_text[item - 5: item])
        # 'leading_words': does prose (more than two tokens, average token
        # length above 2) immediately precede the candidate?
        leading_window = file_text[item - 40: item].split(' ')
        leading_words = int(len(leading_window) > 2 and
                            (sum(len(w) for w in leading_window) / len(leading_window)) > 2)
        leading_see = int('see' in file_text[item - 10: item].lower())
        leading_with = int('with' in file_text[item - 10: item].lower())
        leading_text = int(len(re.findall(r'\w+', file_text[item - 5: item])) > 0)
        trailing_financial = int('financial' in file_text[item: item + 30].lower())
        trailing_omission = int(('applicable' in file_text[item: item + 300].lower())
                                or ('omitted' in file_text[item: item + 300].lower()))
        leading_table_of_contents = int('table of contents' in file_text[item - 50: item].lower())
        leading_newline_count = len(file_text[item - 20: item].split('\n'))
        # Does this candidate coincide with the regex-located section bounds?
        regex_open = 0
        regex_close = 0
        try:
            mda = match[-1][0]
            if file_text[item: item + len(mda)] == mda:
                regex_open = 1
            if file_text[item - len(mda): item] == mda:
                regex_close = 1
        except IndexError:
            pass  # regex found no Item 7 ... Item 8 span
        data = {
            'position': item / total_text_length,
            'trailing_management': trailing_management,
            'trailing_period': trailing_period,
            'trailing_7': trailing_7,
            'trailing_newline': trailing_newline,
            'leading_newline': leading_newline,
            'total_size': total_text_length,
            'leading_tab': leading_tab,
            'leading_spaces': leading_spaces,
            'regex_open': regex_open,
            'regex_close': regex_close,
            'trailing_financial': trailing_financial,
            'input_location': index,
            'trailing_7A': trailing_7A,
            'leading_words': leading_words,
            'leading_see': leading_see,
            'leading_text': leading_text,
            'leading_double_newline': leading_double_newline,
            'is_uppercase': is_uppercase,
            'trailing_omission': trailing_omission,
            'leading_with': leading_with,
            'leading_table_of_contents': leading_table_of_contents,
            'leading_newline_count': leading_newline_count,
            'trailing_analysis': trailing_analysis,
            'trailing_8': trailing_8
        }
        open_features = [data[f] for f in OPEN_INDEPENDENT_VARIABLES]
        close_features = [data[f] for f in CLOSE_INDEPENDENT_VARIABLES]
        open_probabilities.append(clf_open.predict([open_features]))
        close_probabilities.append(clf_close.predict([close_features]))
    try:
        open_index = open_probabilities.index(max(open_probabilities))
        close_index = close_probabilities.index(max(close_probabilities))
    except ValueError:
        # Bug fix: a filing with no 'item' occurrences left both lists
        # empty and max([]) raised ValueError (the sibling script already
        # guards this case).
        return ''
    if open_index > close_index:
        return ''
    return file_text[items[open_index]: items[close_index]]
def get_all_mda(clf_open, clf_close):
    """Scratch/benchmark harness for multiprocessing experiments.

    NOTE(review): the classifier arguments are unused in the live code; the
    commented block below is the original per-file extraction loop.
    """
    # https://docs.python.org/3/library/multiprocessing.html
    def f(x, a, b):
        # Simulated unit of work for the timing comparison.
        time.sleep(3)
        return x * x + a + b
    # _, _, file_names = next(
    #     os.walk(
    #         f'C:\\Users\\jpetr\\PycharmProjects\\SEC-Analytics\\Data\\10-K Sample'))
    #
    # for file_name in file_names:
    #     file_path = f'{PATH}\\{file_name}'
    #     file_text = open(file_path).read()
    #     match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
    #
    #     try:
    #         mda = match[-1][0]
    #         os.startfile(file_path)
    #         output_file = open('Text Extraction\\output.txt', 'w')
    #         output_file.write(mda)
    #         os.startfile('Text Extraction\\output.txt')
    #         output_file.close()
    #     except Exception as e:
    #         print(e, file_name)
    n_pool = 3  # NOTE(review): unused; Pool(50) below is hard-coded
    l_test = [1, 2, 3, 4, 5, 6, 7, 8] * 50
    # Expected time to completion for multithreading:
    # (len(l_test) // n_pool + len(l_test) % n_pool) * sleep_time
    # Expected time to completion for single-thread:
    # len(l_test) * sleep_time
    start = time.time()
    # NOTE(review): p.map supplies a single argument but f requires three,
    # and a locally defined function cannot be pickled for a Pool on
    # Windows -- this call raises as written. TODO confirm intent.
    with Pool(50) as p:
        print(p.map(f, l_test))
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
    start = time.time()
    print(list(map(f, l_test)))
    end = time.time()
    print(f'Single Thread: {round(end - start, 2)} seconds')
# Module-level scratch state for the __main__ benchmark below.
a = 12
b = 41
# NOTE(review): opened at import time and only closed at the bottom of the
# __main__ block; pool workers re-importing this module each open their own
# handle in append mode -- confirm that interleaved writes are acceptable.
file = open('testing.csv', 'a')
def f(x):
    """Benchmark worker: sleep, append a CSV row, return the computed value."""
    time.sleep(1)
    file.write(f'{x},{x * x + a + b}\n')
    return x * x + a + b
if __name__ == "__main__":
    # NOTE(review): multiprocessing benchmark harness; the commented block
    # below is the earlier single-file extraction experiment.
    # with open('../opening_random_forest.pkl', 'rb') as f:
    #     clf_open = pickle.load(f)
    #
    # with open('../closing_random_forest.pkl', 'rb') as f:
    #     clf_close = pickle.load(f)
    #
    # file = open('../Data/10-K Sample/20060330_10-K_edgar_data_906780_0000921895-06-000823_1.txt').read()
    # print(get_mda(clf_open, clf_close, file))
    # match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
    #
    # try:
    #     mda = match[-1][0]
    #     os.startfile(file_path)
    #     output_file = open('Text Extraction\\output.txt', 'w')
    #     output_file.write(mda)
    #     os.startfile('Text Extraction\\output.txt')
    #     output_file.close()
    # except Exception as e:
    #     print(e, file_name)
    n_pool = 3  # NOTE(review): unused; Pool(5) below is hard-coded
    l_test = [1, 2, 3, 4, 5, 6, 7, 8]
    # Expected time to completion for multithreading:
    # (len(l_test) // n_pool + len(l_test) % n_pool) * sleep_time
    # Expected time to completion for single-thread:
    # len(l_test) * sleep_time
    start = time.time()
    with Pool(5) as p:
        print(p.map(f, l_test))
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
    # start = time.time()
    # print(list(map(f, l_test)))
    # end = time.time()
    # print(f'Single Thread: {round(end - start, 2)} seconds')
    file.close()
import os
import csv
import time
import pickle
import multiprocessing
import pathlib
# import sklearn
# from sklearn.ensemble import RandomForestClassifier
# from sklearn.ensemble.forest import RandomForestClassifier
# Parent of this script's working directory (Windows-style separators).
PATH = '\\'.join(str(pathlib.Path().absolute()).split('\\')[:-1])
# Input data root (empty here -- TODO confirm intended value).
DATA_PATH = ''
# Feature names, in the exact column order expected by the pickled 10-Q
# opening-boundary classifier.
OPEN_INDEPENDENT_VARIABLES = ['position', 'text_size', 'trailing_analysis', 'trailing_discussion', 'leading_item',
                              'leading_newline_count', 'leading_item_count', 'n_management', 'trailing_uppercase',
                              'next_newline_distance', 'trailing_newline_count', 'leading_2', 'trailing_continue', 'trailing_item_count']
# Feature names for the 10-Q closing-boundary classifier.
CLOSE_INDEPENDENT_VARIABLES = ['position', 'text_size', 'trailing_qualitative', 'trailing_procedures', 'leading_item',
                               'leading_newline_count', 'leading_item_count', 'n_index', 'trailing_uppercase',
                               'next_newline_distance', 'trailing_newline_count', 'leading_3', 'leading_4',
                               'trailing_continue', 'trailing_item_count', 'isQuantitativeInDocument', 'isControlsInDocument',
                               'trailing_word_count', 'next_double_newline_distance', 'leading_word_count']
# Pre-trained random forests, loaded once at import so pool workers reuse them.
OPEN_CLASSIFIER = pickle.load(open('../rf_quarterly_open.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../rf_quarterly_close.pkl', 'rb'))
def get_open_text_index(clf_open, file_text):
    """Return the character offset where the MD&A section most likely opens.

    Every occurrence of 'management' is scored with *clf_open*; the offset
    of the highest-scoring candidate is returned.
    """
    candidate_scores = []
    candidates = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
    text_length = len(file_text)
    for rank, pos in enumerate(candidates):
        # Lower-cased context windows around the candidate.
        after = file_text[pos:pos + 100].lower()
        after_wide = file_text[pos:pos + 500].lower()
        before_narrow = file_text[pos - 40:pos].lower()
        before_mid = file_text[pos - 100:pos].lower()
        before_wide = file_text[pos - 500:pos].lower()
        newline_gap = 0
        try:
            newline_gap = after.index('\n')
        except ValueError:
            pass  # no newline within the trailing window
        features = {
            'position': pos / text_length,
            'text_size': text_length,
            'trailing_analysis': int('analysis' in after),
            'trailing_discussion': int('discussion' in after),
            'leading_item': int('item' in before_mid),
            'leading_newline_count': len(before_narrow.split('\n')),
            'leading_item_count': len(before_wide.split('item')),
            'n_management': rank,
            'trailing_uppercase': int(file_text[pos:pos + 50].isupper()),
            'next_newline_distance': newline_gap,
            'trailing_newline_count': len(after.split('\n')),
            'leading_2': int('2' in before_mid),
            'trailing_continue': int('continue' in after_wide),
            'trailing_item_count': len(after_wide.split('item'))
        }
        feature_row = [features[name] for name in OPEN_INDEPENDENT_VARIABLES]
        candidate_scores.append(clf_open.predict([feature_row]))
    return candidates[candidate_scores.index(max(candidate_scores))]
def _close_candidate_features(file_text, index, i, total_text_length,
                              isQuantitativeInDocument, isControlsInDocument):
    """Build the closing-boundary feature dict for one candidate offset."""
    trailing_text = file_text[index:index + 100].lower()
    trailing_text_large = file_text[index:index + 500].lower()
    leading_text_small_window = file_text[index - 40:index].lower()
    leading_text_medium_window = file_text[index - 100:index].lower()
    leading_text_large_window = file_text[index - 500:index].lower()
    try:
        trailing_word_count = len(trailing_text_large.split('\n\n')[0].split(' '))
    except IndexError:
        trailing_word_count = 50
    try:
        leading_word_count = len(leading_text_medium_window.split('\n\n')[0].split(' '))
    except IndexError:
        leading_word_count = 50
    next_newline_distance = 100          # window width when no newline found
    try:
        next_newline_distance = trailing_text.index('\n')
    except ValueError:
        pass
    next_double_newline_distance = 500   # window width when no blank line found
    try:
        next_double_newline_distance = trailing_text_large.index('\n\n')
    except ValueError:
        pass
    return {
        'position': index / total_text_length,
        'text_size': total_text_length,
        'trailing_qualitative': int('qualitative' in trailing_text),
        'trailing_procedures': int('procedures' in trailing_text),
        'leading_item': int('item' in leading_text_small_window),
        'leading_newline_count': len(leading_text_small_window.split('\n')),
        'leading_item_count': len(leading_text_large_window.split('item')),
        'n_index': i,
        'trailing_uppercase': int(file_text[index:index + 50].isupper()),
        'next_newline_distance': next_newline_distance,
        'trailing_newline_count': len(trailing_text.split('\n')),
        'leading_3': int('3' in leading_text_small_window),
        'leading_4': int('4' in leading_text_small_window),
        'trailing_continue': int('continue' in trailing_text_large),
        'trailing_item_count': len(trailing_text_large.split('item')),
        'isQuantitativeInDocument': isQuantitativeInDocument,
        'isControlsInDocument': isControlsInDocument,
        'trailing_word_count': trailing_word_count,
        'next_double_newline_distance': next_double_newline_distance,
        'leading_word_count': leading_word_count
    }
def _best_close_candidate(clf_close, file_text, offsets, total_text_length,
                          isQuantitativeInDocument, isControlsInDocument):
    """Score *offsets* with *clf_close*; return (best_index, best_score) or (None, None)."""
    scores = []
    for i, index in enumerate(offsets):
        data = _close_candidate_features(file_text, index, i, total_text_length,
                                         isQuantitativeInDocument, isControlsInDocument)
        features = [data[f] for f in CLOSE_INDEPENDENT_VARIABLES]
        scores.append(clf_close.predict([features]))
    if not scores:
        return None, None
    best = max(scores)
    return scores.index(best), best
def get_close_text_index(clf_close, file_text):
    """Return the character offset where the MD&A section most likely ends.

    Candidates are every occurrence of 'quantitative' (Item 3 heading) and
    'controls' (Item 4 heading); the better-scoring candidate wins. Falls
    back to the end of the document when neither word appears.
    """
    quantitatives = [m.start() for m in re.finditer('quantitative', file_text, re.IGNORECASE)]
    controls = [m.start() for m in re.finditer('controls', file_text, re.IGNORECASE)]
    isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
    isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
    total_text_length = len(file_text)
    q_idx, q_score = _best_close_candidate(clf_close, file_text, quantitatives, total_text_length,
                                           isQuantitativeInDocument, isControlsInDocument)
    c_idx, c_score = _best_close_candidate(clf_close, file_text, controls, total_text_length,
                                           isQuantitativeInDocument, isControlsInDocument)
    # Bug fix: the original tested truthiness ("if quantitative_max and
    # controls_max"), which wrongly discarded a valid best index of 0.
    if q_idx is not None and c_idx is not None:
        # NOTE(review): the original compared the candidates' positions in
        # their own lists; comparing the predicted scores is the evident
        # intent -- confirm against the training harness.
        if q_score > c_score:
            return quantitatives[q_idx]
        return controls[c_idx]
    if q_idx is not None:
        return quantitatives[q_idx]
    if c_idx is not None:
        return controls[c_idx]
    return total_text_length
def mp_worker(args):
    """Extract the MD&A section from one 10-Q filing.

    Args:
        args: path of the filing text file (coerced to ``str``).

    Returns:
        Tuple of (file_name, filing date YYYYMMDD, CIK, extracted MD&A text).
    """
    file_name = str(args)
    clf_open = OPEN_CLASSIFIER
    clf_close = CLOSE_CLASSIFIER
    # Context manager releases the file handle (the original leaked it).
    with open(file_name) as fh:
        file_text = fh.read()
    open_index = get_open_text_index(clf_open, file_text)
    close_index = get_close_text_index(clf_close, file_text)
    mda = file_text[open_index:close_index]
    parts = file_name.split('_')
    cik = parts[6]          # CIK is the 7th underscore-delimited token of the path
    date = parts[2][-8:]    # trailing YYYYMMDD of the 3rd token
    return file_name, date, cik, mda
def mp_handler(file_names, n_pools, output_dir):
    """Debug driver: extract each filing and present it for manual review.

    NOTE(review): the CSV writer is commented out, so *output_dir* is
    currently unused; each extraction is written to IsThisTheSection.txt,
    opened with the OS default application (os.startfile is Windows-only),
    and the loop blocks on input() between filings. The pool is never
    closed -- acceptable for an interactive debug session only.
    """
    p = multiprocessing.Pool(n_pools)
    start = time.time()
    # writer = csv.writer(open(output_dir, 'a', newline=''))
    counter = 0
    for result in p.imap(mp_worker, file_names):
        counter += 1
        print(f'\rPercentage Complete: {round((counter / len(file_names)) * 100, 2)}%', end="", flush=True)
        # writer.writerow([result[0],result[1],result[2],result[3]])
        output_file = open('IsThisTheSection.txt', 'w')
        output_file.write(result[3])
        output_file.close()
        os.startfile('IsThisTheSection.txt')
        input('Continue?')
    print('\n')
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
if __name__ == '__main__':
    # 10-Q archive for 1993-2000, organised as <year>/<QTR*>/<filing>.
    path_data = 'D:\\SEC Filing Data\\10-X_C_1993-2000'
    _, years, _ = next(os.walk(path_data))
    for year in years:
        if int(year) < 1996:
            # Skip earlier years (presumably already processed -- confirm).
            continue
        _, quarters, _ = next(os.walk(f'{path_data}\\{year}'))
        for quarter in quarters:
            if quarter == 'QTR1' and year == '1996':
                continue  # presumably already processed -- confirm
            print(f'Working on {quarter} of {year}...')
            output_directory = f'Extracted\\New\\Quarterly\\10-Q_{year}_{quarter}.csv'
            _, _, directories = next(os.walk(f'{path_data}\\{year}\\{quarter}'))
            # Keep only 10-Q filings for this quarter.
            all_directories = [f'{path_data}\\{year}\\{quarter}\\' + d
                               for d in directories if '_10-Q_' in d]
            mp_handler(all_directories, 1, output_directory)
<file_sep>import pandas as pd
import os
import re
import nltk
import multiprocessing
import gensim
import seaborn as sns
import matplotlib.pyplot as plt
def get_raw_mda():
    """Load every extracted MD&A CSV and join it to the bankruptcy panel.

    Returns:
        DataFrame of MD&A rows matched to bankruptcy observations on
        (year-month string, CIK).
    """
    bankruptcy = pd.read_csv('bankruptcy_cik.csv', index_col=0)
    frames = []
    # Annual filings.
    _, _, files = next(os.walk(f'Extracted'))
    for file in files:
        extracted = pd.read_csv(f'Extracted\\{file}', header=None,
                                names=['path', 'date', 'cik', 'mda'])
        frames.append(extracted[['date', 'cik', 'mda']])
    # Quarterly filings.
    _, _, files = next(os.walk(f'Extracted/Quarterly'))
    for file in files:
        extracted = pd.read_csv(f'Extracted\\Quarterly\\{file}', header=None,
                                names=['path', 'date', 'cik', 'mda'])
        frames.append(extracted[['date', 'cik', 'mda']])
    # One pd.concat replaces the deprecated (removed in pandas 2.0)
    # DataFrame.append and avoids re-copying the frame on every file.
    if frames:
        mda = pd.concat(frames, ignore_index=True)
    else:
        mda = pd.DataFrame(columns=['date', 'cik', 'mda'])
    # YYYY-MM key derived from the raw YYYYMMDD date for the monthly join.
    mda['merge_date'] = mda['date'].apply(lambda x: f'{str(x)[:4]}-{str(x)[4:6]}')
    merged = pd.merge(mda, bankruptcy, left_on=['merge_date', 'cik'], right_on=['date_month', 'cik'])
    return merged
def clean_raw_text(text, stopwords, lemmatizer):
    """Normalise a raw document: lowercase, strip punctuation, drop stop
    words, and lemmatize each remaining token.

    Args:
        text: raw document (coerced to str).
        stopwords: iterable of words to drop, or None to keep everything.
        lemmatizer: object exposing ``lemmatize(word)``.

    Returns:
        The cleaned document as a single space-joined string.
    """
    normalised = re.sub(r'[^\w\s]', '', str(text).lower().strip())
    tokens = normalised.split()
    if stopwords is not None:
        tokens = [token for token in tokens if token not in stopwords]
    return ' '.join(lemmatizer.lemmatize(token) for token in tokens)
def mp_worker(args):
    """Pool worker: clean one raw document.

    *args* is a (text, stopwords, lemmatizer) tuple as built by mp_handler.
    """
    raw_text, stop_words, lemma = args
    return clean_raw_text(raw_text, stop_words, lemma)
def mp_handler(text, lemmatizer, stopwords, n_pools):
    """Clean every document in *text* in parallel.

    Args:
        text: list of raw documents.
        lemmatizer: lemmatizer shared by all workers.
        stopwords: stop-word collection shared by all workers.
        n_pools: number of worker processes.

    Returns:
        List of cleaned documents, in input order.
    """
    args = list(zip(text, [stopwords] * len(text), [lemmatizer] * len(text)))
    counter = 0
    cleaned_documents = []
    # Context manager tears the pool down; the original never closed it.
    with multiprocessing.Pool(n_pools) as p:
        for result in p.imap(mp_worker, args):
            counter += 1
            cleaned_documents.append(result)
            print(f'\rPercentage Complete: {round((counter / len(args)) * 100, 2)}%', end="", flush=True)
    print('\n')
    return cleaned_documents
def lda_stuff(corpus):
    """Fit a 50-topic LDA model on *corpus* and dump the top terms.

    Each document is tokenised into non-overlapping bigrams, converted to a
    gensim bag-of-words, and the per-topic term weights are written to
    topics.csv.
    """
    lst_corpus = []
    for string in corpus:
        lst_words = string.split()
        # Non-overlapping bigrams (step of 2), not a sliding window.
        lst_grams = [" ".join(lst_words[i:i + 2]) for i in range(0,
                                                                 len(lst_words), 2)]
        lst_corpus.append(lst_grams)
    id2word = gensim.corpora.Dictionary(lst_corpus)
    dic_corpus = [id2word.doc2bow(word) for word in lst_corpus]
    # Fixed random_state keeps topic assignments reproducible across runs.
    lda_model = gensim.models.ldamodel.LdaModel(corpus=dic_corpus, id2word=id2word, num_topics=50, random_state=123,
                                                update_every=1, chunksize=100, passes=10, alpha='auto',
                                                per_word_topics=True)
    ## output
    lst_dics = []
    for i in range(0, 50):
        lst_tuples = lda_model.get_topic_terms(i)
        for tupla in lst_tuples:
            lst_dics.append({"topic": i, "id": tupla[0],
                             "word": id2word[tupla[0]],
                             "weight": tupla[1]})
    dtf_topics = pd.DataFrame(lst_dics,
                              columns=['topic', 'id', 'word', 'weight'])
    dtf_topics.to_csv('topics.csv')
    ## plot
    # fig, ax = plt.subplots()
    # sns.barplot(y="word", x="weight", hue="topic", data=dtf_topics, dodge=False, ax=ax).set_title('Main Topics')
    # ax.set(ylabel="", xlabel="Word Importance")
    # plt.show()
def surrounding_word_corpus(word, corpus, n_words):
    """Collect the context window around every occurrence of *word*.

    For each occurrence, gathers the *n_words* tokens before it through the
    ``n_words - 1`` tokens after it (the target word included) and joins
    them into one string.

    Args:
        word: exact token to search for.
        corpus: iterable of space-delimited documents.
        n_words: half-width of the context window.

    Returns:
        List of context strings, one per occurrence across all documents.
    """
    corpus_around_word = []
    for document in corpus:
        tokens = document.split(' ')
        for position, token in enumerate(tokens):
            if token != word:
                continue
            context = []
            for offset in range(n_words * 2):
                neighbour = position - n_words + offset
                # Bug fix: negative indices silently wrapped around and
                # pulled words from the END of the document into the
                # leading context; skip them instead.
                if neighbour < 0:
                    continue
                try:
                    context.append(tokens[neighbour])
                except IndexError:
                    pass  # window runs past the end of the document
            corpus_around_word.append(' '.join(context))
    return corpus_around_word
if __name__ == "__main__":
    # Obtain the extracted MD&A sections in dataframe format
    data = get_raw_mda()
    # _, _, file_path = next(os.walk('../Data/10-K Sample'))
    # data = [open(f'../Data/10-K Sample/{file}').read() for file in file_path]
    # Clean the raw text. NOTE(review): requires the NLTK 'stopwords' and
    # WordNet corpora to be downloaded beforehand -- confirm environment.
    nltk_stopwords = nltk.corpus.stopwords.words("english")
    nltk_lemmatizer = nltk.stem.wordnet.WordNetLemmatizer()
    # corpus = mp_handler(data, nltk_lemmatizer, nltk_stopwords, 2)
    corpus = mp_handler(list(data['mda']), nltk_lemmatizer, nltk_stopwords, 4)
    # financial_stopwords = ['january', 'februrary', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october',
    #                        'november', 'december', 'year', 'fiscal']
    lda_stuff(corpus)
    # print(nltk_stopwords)
<file_sep>import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
import math
from PetriProgramming.PortfolioHelper import plot_cumulative_returns, summary_statistics
# Widen pandas console output for interactive inspection.
pd.set_option('display.max_columns', 80)
pd.set_option('display.max_rows', 100)
pd.set_option('display.min_rows', 100)
def get_data():
    """Monthly security returns merged with FF3F factors and MD&A similarity
    (Compustat returns variant).

    Returns:
        DataFrame with one row per security-month, restricted to rows that
        have a (forward-filled) similarity score, a return, a price, and
        shares outstanding.
    """
    similarity = pd.read_csv('../similarity.csv', header=None, names=['cik', 'date', 'similarity'])
    similarity['date'] = pd.to_datetime(similarity['date'])
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    # Factor columns arrive in percent; rescale to decimal returns.
    for factor in ['Mkt-RF', 'SMB', 'HML', 'RF']:
        ff3f[factor] = ff3f[factor] / 100
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    security = pd.read_csv('../../Data/CIK_RETURNS2.csv')
    security = security[['gvkey', 'datadate', 'tic', 'cusip', 'conm', 'trt1m', 'cik', 'prccm', 'cshom']]
    security['trt1m'] = security['trt1m'] / 100
    security['datadate'] = pd.to_datetime(security['datadate'], format='%Y%m%d')
    security = security.rename(columns={'datadate': 'date'})
    merged = pd.merge(security, ff3f, how='left', on='date')
    merged = pd.merge(merged, similarity, how='left', on=['cik', 'date'])
    # Carry each firm's latest similarity score forward to later months.
    merged['similarity'] = merged.groupby('cik')['similarity'].ffill()
    merged = merged[~merged['similarity'].isna()]
    merged = merged[~merged['trt1m'].isna()]
    merged = merged[(~merged['prccm'].isna()) & (~merged['cshom'].isna())]
    return merged
def get_data2():
    """Monthly CRSP returns merged with MD&A similarity (CRSP variant).

    Links CRSP returns to CIKs through the Compustat link table on
    (year, month, 8-character CUSIP), forward-fills each PERMNO's latest
    similarity score, and drops rows missing returns, price, or shares.
    """
    similarity = pd.read_csv('../similarity.csv', header=None, names=['cik', 'date', 'similarity'])
    similarity['date'] = pd.to_datetime(similarity['date'], format='%Y-%m-%d')
    similarity['month'] = similarity['date'].dt.month
    similarity['year'] = similarity['date'].dt.year
    similarity = similarity.rename(columns={'date': 'filing_date'})
    returns = pd.read_csv('../../Data/Returns-CRSP.csv')
    returns['date'] = pd.to_datetime(returns['date'], format='%Y%m%d')
    # NOTE(review): negative PRC presumably flags bid/ask-average quotes
    # (CRSP convention) -- abs() keeps the magnitude; confirm.
    returns['PRC'] = returns['PRC'].apply(lambda x: abs(x))
    # Non-numeric return codes become NaN and are dropped below.
    returns['RET'] = pd.to_numeric(returns['RET'], errors='coerce')
    returns['year'] = returns['date'].dt.year
    returns['month'] = returns['date'].dt.month
    CIK_link = pd.read_csv('../../Data/CIK_Link.csv')
    # First 8 CUSIP characters (issuer + issue) to match CRSP's CUSIP field.
    CIK_link['merge_CUSIP'] = CIK_link['cusip'].apply(lambda x: str(x)[:8])
    CIK_link['datadate'] = pd.to_datetime(CIK_link['datadate'], format='%Y%m%d')
    CIK_link['month'] = CIK_link['datadate'].dt.month
    CIK_link['year'] = CIK_link['datadate'].dt.year
    CIK_merged = pd.merge(returns, CIK_link, how='left', left_on=['year', 'month', 'CUSIP'],
                          right_on=['year', 'month', 'merge_CUSIP'])
    df_all = pd.merge(CIK_merged, similarity, how='left', on=['cik', 'year', 'month'])
    df_all = df_all[df_all['year'] < 2019]
    # df_all['new_filing'] = df_all['similarity'].apply(lambda x: 0 if pd.isna(x) else 1)
    # Carry each PERMNO's latest similarity forward to later months.
    df_all['similarity'] = df_all.groupby('PERMNO')['similarity'].ffill()
    df_all = df_all[~df_all['similarity'].isna()]
    df_all = df_all[~df_all['RET'].isna()]
    df_all = df_all[(~df_all['PRC'].isna()) & (~df_all['SHROUT'].isna())]
    df_all = df_all[['PERMNO', 'date', 'SHRCD', 'EXCHCD', 'SICCD', 'TICKER', 'COMNAM', 'SHRCLS',
                     'CUSIP', 'PRC', 'RET', 'SHROUT', 'cik', 'similarity', 'filing_date']]
    return df_all
def get_portfolio_monthly(n_portfolios, factor_column, df):
    """Form value-weighted portfolios sorted monthly on ``factor_column``.

    Each month, stocks are sorted into ``n_portfolios`` quantile groups on the
    factor, weights are set by market cap within each group, and those weights
    are applied to the *following* month's returns. Fama-French three-factor
    data is merged onto the resulting portfolio return series.

    :param n_portfolios: number of quantile portfolios.
    :param factor_column: column of ``df`` to sort on (e.g. 'similarity').
    :param df: CRSP-style panel with PERMNO, date, PRC, SHROUT, RET columns.
    :return: DataFrame of monthly portfolio returns with FF3F columns attached.
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Gets rid of some multiple stuff...
    # This needs to be fixed
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # df['random'] = np.random.rand(df.shape[0])
    # Restrict the sort to similarity scores in (0.8, 0.95); outside values
    # are set to NaN and excluded from quantile assignment.
    df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.8 else x)
    df['similarity'] = df['similarity'].apply(lambda x: np.nan if x > 0.95 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where we have more data...
    df = df[df['year'] > 1996]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # One end-of-month observation per PERMNO for the sorting step.
    df_eom = df.drop_duplicates(subset=['PERMNO', 'year', 'month'], keep='last')
    df_eom = df_eom[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    df_eom['group'] = df_eom.groupby(['year', 'month'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # df_eom['group'] = df_eom.groupby(['year', 'month'])[factor_column].transform(
    #     lambda x: pd.cut(x=x, bins=n_portfolios, labels=list(range(n_portfolios))))
    # Value weights: market cap divided by the group's total market cap.
    eom_group_market_cap = df_eom.groupby(['group', 'year', 'month'])['market_cap'].sum().reset_index()
    eom_group_market_cap = eom_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eom = pd.merge(df_eom, eom_group_market_cap, how='left', on=['group', 'year', 'month'])
    df_eom['weight'] = df_eom['market_cap'] / df_eom['group_market_cap']
    # Merge the group onto the NEXT month's returns (shift month forward by one,
    # rolling December into January of the following year).
    df_eom['merge_year'] = df_eom.apply(lambda x: x['year'] if x['month'] != 12 else x['year'] + 1, axis=1)
    df_eom['merge_month'] = df_eom['month'].apply(lambda x: x + 1 if x != 12 else 1)
    df_eom = df_eom[['merge_year', 'merge_month', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eom, how='left', left_on=['PERMNO', 'year', 'month'],
                  right_on=['PERMNO', 'merge_year', 'merge_month'])
    # df = df[~df['weight'].isna()]
    # df = df[~df['group'].isna()]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # Validate sorting methodology
    # a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    #
    # a = df.groupby(['group', 'date'])['weight'].sum().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    df['weighted_return'] = df['weight'] * df['RET']
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # Fama-French factors come in percent; convert to decimals before merging.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # FF dates are YYYYMM; align to end-of-month timestamps.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    # Merge on year/month rather than exact date, since CRSP month-end dates
    # can differ from calendar month-end.
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def get_portfolio_returns(n_portfolios, factor_column, df):
    """
    Rebalances portfolios monthly to weights determined at previous year's end, reconstitutes at previous year's end

    Uses the Compustat-style column set (cik, prccm, cshom, trt1m): sorts
    December observations into quantiles, computes value weights within each
    group, and applies those weights to the following year's monthly returns.

    :param n_portfolios: number of portfolios for sorting
    :param factor_column: column that we are sorting on
    :param df: dataframe of data
    :return: returns monthly returns for all portfolios
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df = df.drop_duplicates(subset=['cik', 'date'])
    # Market cap = month-end close price * common shares outstanding.
    df['market_cap'] = df['prccm'] * df['cshom']
    # Sort on December (end-of-year) observations only.
    df_eoy = df[df['month'] == 12]
    df_eoy = df_eoy[['cik', 'date', 'year', 'month', 'similarity', 'market_cap']]
    df_eoy['group'] = df_eoy.groupby(['year'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    eoy_group_market_cap = df_eoy.groupby(['group', 'year'])['market_cap'].sum().reset_index()
    eoy_group_market_cap = eoy_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eoy = pd.merge(df_eoy, eoy_group_market_cap, how='left', on=['group', 'year'])
    df_eoy['weight'] = df_eoy['market_cap'] / df_eoy['group_market_cap']
    # TODO: Firms must have existed when they were sorted! Pls fix.
    # Merge the group onto the NEXT year's monthly observations.
    df_eoy['merge_year'] = df_eoy['year'] + 1
    df_eoy = df_eoy[['merge_year', 'cik', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eoy, how='left', left_on=['cik', 'year'], right_on=['cik', 'merge_year'])
    df = df[~df['weight'].isna()]
    df = df[~df['group'].isna()]
    df['weighted_return'] = df['weight'] * df['trt1m']
    # NOTE(review): debug dump left in place; remove once weights are validated.
    df.to_csv('wtf.csv')
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # TODO: Actual return here will be messed up.... fix later! ????
    # TODO: Rename columns and stuff
    # TODO: Add Long-Short
    # TODO: Why is weight above 1?
    # Fama-French factors come in percent; convert to decimals before merging.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['date'])
    return portfolios
def get_portfolio_returns2(n_portfolios, factor_column, df):
    """
    Rebalances portfolios monthly to weights determined at previous year's end, reconstitutes at previous year's end

    CRSP variant of :func:`get_portfolio_returns` (PERMNO, PRC, SHROUT, RET
    columns). Also adds a 'random' column usable as a placebo sorting factor
    and plots per-portfolio firm counts as a sanity check.

    :param n_portfolios: number of portfolios for sorting
    :param factor_column: column that we are sorting on
    :param df: dataframe of data
    :return: returns monthly returns for all portfolios
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # Placebo factor: random sorts should produce no return spread.
    df['random'] = np.random.rand(df.shape[0])
    # df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.9 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Sort on December (end-of-year) observations only.
    df_eoy = df[df['month'] == 12]
    df_eoy = df_eoy[['PERMNO', 'date', 'year', 'month', factor_column, 'market_cap']]
    df_eoy['group'] = df_eoy.groupby(['year'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    eoy_group_market_cap = df_eoy.groupby(['group', 'year'])['market_cap'].sum().reset_index()
    eoy_group_market_cap = eoy_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eoy = pd.merge(df_eoy, eoy_group_market_cap, how='left', on=['group', 'year'])
    df_eoy['weight'] = df_eoy['market_cap'] / df_eoy['group_market_cap']
    # Merge the group onto the NEXT year's monthly observations.
    df_eoy['merge_year'] = df_eoy['year'] + 1
    df_eoy = df_eoy[['merge_year', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eoy, how='left', left_on=['PERMNO', 'year'], right_on=['PERMNO', 'merge_year'])
    df = df[~df['weight'].isna()]
    df = df[~df['group'].isna()]
    # Sanity-check plot: firm count per portfolio over time.
    a = df.groupby(['group', 'date'])['PERMNO'].count()
    for i in range(n_portfolios):
        plt.plot(a[i])
    plt.show()
    df['weighted_return'] = df['weight'] * df['RET']
    # df.to_csv('wtf.csv')
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # TODO: Actual return here will be messed up.... fix later! ????
    # TODO: Rename columns and stuff
    # TODO: Add Long-Short
    # TODO: Why is weight above 1?
    # Fama-French factors come in percent; convert to decimals before merging.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    # Merge on year/month rather than exact date, since CRSP month-end dates
    # can differ from calendar month-end.
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
if __name__ == "__main__":
    # Entry point: build the CRSP dataset, form 5 monthly similarity-sorted
    # portfolios, then plot cumulative returns and print summary statistics.
    # The commented lines below are earlier experiments kept for reference.
    # d = get_data()
    # print(len(d))
    # print(len(d[~d['cshom'].isna()]))
    # print(len(d[~d['prccm'].isna()]))
    # print(len(d[(~d['prccm'].isna()) & (~d['cshom'].isna())]))
    # d.to_csv('similarity_dataset.csv')
    # d = get_data2()
    # d.to_csv('new.csv')
    # d = pd.read_csv('../new.csv', index_col=0)
    # # ports = get_portfolio_returns2(4, 'similarity', d)
    # d.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # df = pd.read_csv('../../Data/Returns-CRSP.csv')
    # df['date'] = pd.to_datetime(df['date'])
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # print(d)
    # print(d.describe())
    # ports = get_portfolio_returns2(4, 'similarity', d)
    # plot_cumulative_returns(ports, 4)
    #
    # summary_statistics(ports, 4)
    # d = pd.read_csv('similarity_dataset.csv', index_col=0)
    # portfolios = get_portfolio_returns(5, 'similarity', d)
    # plot_cumulative_returns(portfolios, 5)
    d = get_data2()
    # p = get_portfolio_quarterly(5, 'similarity', d)
    p = get_portfolio_monthly(5, 'similarity', d)
    plot_cumulative_returns(p, 5)
summary_statistics(p, 5)<file_sep>import re
import os
import csv
import time
import pickle
import multiprocessing
import pathlib
# import sklearn
# from sklearn.ensemble import RandomForestClassifier
# from sklearn.ensemble.forest import RandomForestClassifier
# Parent directory of the current working directory (Windows-style paths).
PATH = '\\'.join(str(pathlib.Path().absolute()).split('\\')[:-1])
DATA_PATH= ''
# Regex capturing the MD&A section of a filing: Item 2 (10-Q) up to Item 3.
# REGEX_10K = r"(Item[\s]+?7\.[\s\S]*?)(Item[\s]+?8\.)"
REGEX_10Q = r"(Item[\s]+?2\.[\s\S]*?)(Item[\s]+?3\.)"
# Feature names, in the order the pickled classifiers expect them.
OPEN_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline',
                              'leading_newline', 'total_size', 'regex_open', 'trailing_analysis']
CLOSE_INDEPENDENT_VARIABLES = ['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size',
                               'leading_tab', 'leading_spaces', 'regex_close', 'trailing_quantitative']
# Pre-trained random forests for locating the MD&A open/close boundaries.
# NOTE(review): pickle.load(open(...)) leaves the file handles open; also,
# unpickling only runs safely against trusted model files.
OPEN_CLASSIFIER = pickle.load(open('../open_quarterly_random_forest.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../close_quarterly_random_forest.pkl', 'rb'))
def get_mda(clf_open, clf_close, file_text):
    """Extract the MD&A section from a filing using two boundary classifiers.

    Every occurrence of 'item' is scored as a candidate open and close boundary
    using hand-built lexical features plus a regex hint (REGEX_10Q).  The
    highest-scoring open and close candidates delimit the returned section.

    :param clf_open: classifier scoring candidate section-open positions.
    :param clf_close: classifier scoring candidate section-close positions.
    :param file_text: full text of the filing.
    :return: MD&A text with newlines escaped as '\\n', or '' if no plausible
             section is found (no candidates, or close precedes open).
    """
    items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    total_text_length = len(file_text)
    match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
    open_probabilities = []
    open_indices = []
    close_probabilities = []
    close_indices = []
    for index, item in enumerate(items):
        trailing_80 = file_text[item: item + 80].lower()
        trailing_management = 1 if 'management' in trailing_80 else 0
        trailing_analysis = 1 if 'analysis' in trailing_80 else 0
        trailing_quantitative = 1 if 'quantitative' in trailing_80 else 0
        # Skip 'item' mentions with no MD&A-related keyword nearby.
        if sum([trailing_management, trailing_analysis, trailing_quantitative]) < 1:
            continue
        trailing_50 = file_text[item: item + 50].lower()
        trailing_100 = file_text[item: item + 100].lower()
        trailing_20 = file_text[item: item + 20].lower()
        trailing_period = 1 if '.' in trailing_20 else 0
        trailing_2 = 1 if '2' in trailing_20 else 0
        trailing_3 = 1 if '3' in trailing_20 else 0
        trailing_newline = 1 if '\n' in trailing_100 else 0
        leading_newline = 1 if '\n' in file_text[item - 5: item] else 0
        leading_spaces = 1 if '  ' in file_text[item - 5: item] else 0
        leading_tab = 1 if '\t' in file_text[item - 5: item] else 0
        # Regex hints: does this position coincide with the regex match's
        # start (regex_open) or end (regex_close)?
        regex_open = 0
        regex_close = 0
        try:
            mda = match[-1][0]
            if file_text[item: item + len(mda)] == mda:
                regex_open = 1
        except IndexError:
            e = 1
        try:
            mda = match[-1][0]
            if file_text[item - len(mda): item] == mda:
                regex_close = 1
        except IndexError:
            e = 1
        data = {
            'position': item / total_text_length,
            'trailing_management': trailing_management,
            'trailing_period': trailing_period,
            'trailing_2': trailing_2,
            'trailing_newline': trailing_newline,
            'leading_newline': leading_newline,
            'total_size': total_text_length,
            'regex_open': regex_open,
            'trailing_analysis': trailing_analysis,
            'trailing_3': trailing_3,
            'leading_tab': leading_tab,
            'leading_spaces': leading_spaces,
            'regex_close': regex_close,
            'trailing_quantitative': trailing_quantitative
        }
        # Assemble feature vectors in the exact order the classifiers expect.
        open_features = []
        for f in OPEN_INDEPENDENT_VARIABLES:
            open_features.append(data[f])
        close_features = []
        for f in CLOSE_INDEPENDENT_VARIABLES:
            close_features.append(data[f])
        open_probability = clf_open.predict([open_features])
        close_probability = clf_close.predict([close_features])
        open_probabilities.append(open_probability)
        close_probabilities.append(close_probability)
        open_indices.append(item)
        close_indices.append(item)
    try:
        open_index = open_probabilities.index(max(open_probabilities))
        close_index = close_probabilities.index(max(close_probabilities))
    except ValueError:
        # No candidate positions at all.
        return ''
    # Candidate lists share ordering, so comparing list positions compares
    # character positions: a close before the open means no valid section.
    if open_index > close_index:
        return ''
    else:
        # return file_text[items[open_index]: items[close_index]]
        return file_text[open_indices[open_index]: close_indices[close_index]].replace('\n', '\\n')
def mp_worker(args):
    """Extract the MD&A section from a single filing (pool worker).

    :param args: path of the filing text file (coerced to ``str``).
    :return: tuple of (file_name, date, cik, mda) for the CSV writer, where
             date and cik are parsed from underscore-delimited path segments.
    """
    file_name = str(args)
    # Module-level classifiers are inherited by each pool process.
    clf_open = OPEN_CLASSIFIER
    clf_close = CLOSE_CLASSIFIER
    # Close the file handle deterministically (original used a bare
    # open(...).read(), leaking the handle until GC).
    with open(file_name) as fh:
        file_text = fh.read()
    mda = get_mda(clf_open, clf_close, file_text)
    # Path segments: index 6 holds the CIK, index 2 ends with the YYYYMMDD date.
    parts = file_name.split('_')
    cik = parts[6]
    date = parts[2][-8:]
    return file_name, date, cik, mda
def mp_handler(file_names, n_pools, output_dir):
    """Run :func:`mp_worker` over ``file_names`` with a process pool.

    Results are appended to the CSV at ``output_dir`` as they arrive, with a
    progress indicator printed to stdout and total wall time reported at the
    end.

    :param file_names: iterable of filing paths to process.
    :param n_pools: number of worker processes.
    :param output_dir: path of the CSV file rows are appended to.
    """
    start = time.time()
    total = len(file_names)  # hoisted: the original recomputed len() per row
    # Context managers guarantee the pool is terminated and the CSV handle is
    # flushed/closed even if a worker raises (the original leaked both).
    with multiprocessing.Pool(n_pools) as p, open(output_dir, 'a', newline='') as out_file:
        writer = csv.writer(out_file)
        counter = 0
        for result in p.imap(mp_worker, file_names):
            counter += 1
            print(f'\rPercentage Complete: {round((counter / total) * 100, 2)}%', end="", flush=True)
            writer.writerow([result[0], result[1], result[2], result[3]])
    print('\n')
    end = time.time()
    print(f'Multiple Threads: {round(end - start, 2)} seconds')
if __name__=='__main__':
    # Walk the SEC filing archive (year/QTR directory layout) and extract the
    # MD&A section from every 10-Q filing, one CSV of results per quarter.
    path_data = 'D:\\SEC Filing Data\\10-X_C_1993-2000'
    _, years, _ = next(os.walk(path_data))
    for year in years:
        # Skip early years; resume from 1996 QTR2 (earlier output exists).
        if int(year) < 1996:
            continue
        _, quarters, _ = next(os.walk(f'{path_data}\\{year}'))
        for quarter in quarters:
            if quarter == 'QTR1' and year == '1996':
                continue
            print(f'Working on {quarter} of {year}...')
            output_directory = f'Extracted\\Quarterly\\10-Q_{year}_{quarter}.csv'
            all_directories = []
            _, _, directories = next(os.walk(f'{path_data}\\{year}\\{quarter}'))
            # Only 10-Q filings (file names embed the form type).
            for directory in directories:
                if '_10-Q_' in directory:
                    all_directories += [f'{path_data}\\{year}\\{quarter}\\' + directory]
            # print(all_directories)
            mp_handler(all_directories, 4, output_directory)
<file_sep>import os
from gensim.models import Word2Vec, KeyedVectors
import re
import pandas as pd
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.metrics import plot_confusion_matrix
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split, GridSearchCV
import numpy as np
import pickle
# from shap import TreeExplainer, summary_plot
# Directory of hand-labelled 10-Q sample filings used for training/testing.
PATH = '../Data/10-Q Sample'
def modify_formatting(text):
    """Turn formatting characters into word tokens so they survive ``str.split``.

    Newlines become 'newline', tabs 'tab', double spaces 'd_s' and periods 'p',
    letting the Word2Vec vocabulary learn layout cues as ordinary words.
    """
    return text.replace('\n', ' newline ').replace('\t', ' tab ').replace('  ', ' d_s ').replace('.', ' p ')
def get_n_surrounding_words(text, n, trailing=True):
    """Return up to *n* whitespace tokens adjacent to the anchor word.

    The first token of *text* (in reading order, or from the end when
    ``trailing`` is False) is treated as the anchor and skipped; the next
    *n* tokens are returned.  With ``trailing=False`` the tokens before the
    anchor come back nearest-first.
    """
    words = text.split()
    if not trailing:
        words = words[::-1]
    return words[1:n + 1]
def create_vector_model():
    """Train a Word2Vec model on the sample filings and save its word vectors.

    Every file in PATH is read, formatting characters are converted to tokens
    via :func:`modify_formatting`, and the resulting vectors are written to
    'mda.wordvectors'.
    """
    _, _, file_names = next(
        os.walk(PATH))
    corpus = [modify_formatting(open(f'{PATH}/{file}').read()) for file in file_names]
    tokens = [doc.split() for doc in corpus]
    model = Word2Vec(tokens).wv
    model.save('mda.wordvectors')
def get_open_features(surrounding_text, file_text, wv, index):
    """Build the feature vector for a candidate MD&A *open* boundary.

    Features are element-wise min/max word embeddings of the 30 words before
    and after the candidate, plus counts of 'item' and blank lines in the 500
    characters either side and the candidate's relative position in the file.

    :param surrounding_text: 1000-character window centred on the candidate.
    :param file_text: full filing text.
    :param wv: word-vector lookup (KeyError for out-of-vocabulary words).
    :param index: character offset of the candidate in ``file_text``.
    :return: flat list of 407 numeric features.
    """
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 30)
    # Element-wise min/max relative to 0: min captures negative components
    # only, max positive components only (embeddings are 100-dimensional).
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    for word in leading_words:
        try:
            embedding = wv[word]
        except KeyError:
            # Out-of-vocabulary word: skip it.
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    for word in trailing_words:
        try:
            embedding = wv[word]
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    # item_count = len(surrounding_text.lower().split('item'))
    # split() counts are occurrences + 1; kept as-is since the classifier
    # was trained on these values.
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text)
    ]
    return features
def create_open_model(wv, df_extracted):
    """Train and persist a random-forest classifier for the MD&A open boundary.

    For each labelled filing, candidate positions (fixed offsets plus every
    'management'/'item' occurrence plus the true open index) are featurized
    with :func:`get_open_features`; the true open index is the positive class.
    A grid-searched RandomForest is fitted, confusion matrices are plotted,
    the model is pickled, and misclassified examples are shown interactively.

    :param wv: word-vector lookup used for features.
    :param df_extracted: DataFrame with 'file' names and labelled 'mda' text
                         ('-9' marks filings with no MD&A).
    """
    files = list(df_extracted['file'])
    mda = list(df_extracted['mda'])
    X = []
    y = []
    indices_history = []
    file_history = []
    for m, file_name in enumerate(files):
        # Labels store newlines escaped; restore before searching the file.
        text = mda[m].replace('\\n', '\n')
        if text == '-9':
            continue
        file_text = open(f'{PATH}/{file_name}').read()
        open_index = file_text.index(text)
        management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
        item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        # Negative candidates: fixed offsets plus keyword hits; the single
        # positive is the labelled open_index.
        interested_locations = [0, 100, 200, 500, 700, 900, 1100, 1500, open_index]
        interested_locations += management_indices
        interested_locations += item_indices
        for p in interested_locations:
            surrounding_text = file_text[p - 500: p + 500]
            features = get_open_features(surrounding_text, file_text, wv, p)
            X.append(features)
            y.append(1 if p == open_index else 0)
            indices_history.append(p)
            file_history.append(file_name)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    parameters = {'max_depth': [6, 5, 7, 10],
                  'class_weight': ['balanced']}
    rf = RandomForestClassifier()
    # Recall-oriented: missing the true boundary is worse than extra candidates.
    clf = GridSearchCV(rf, parameters, scoring='recall', n_jobs=4)
    clf.fit(X_train, y_train)
    print(f'Best Score: {clf.best_score_}')
    print(f'Best Params: {clf.best_params_}')
    #
    plot_confusion_matrix(clf, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(clf, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()
    # explainer = TreeExplainer(clf)
    # shap_values = explainer.shap_values(np.array(X))
    # summary_plot(shap_values, X)
    with open('../../rf_quarterly_open_wv.pkl', 'wb') as f:
        pickle.dump(clf, f)
    # Interactive error analysis: open each misclassified location in an
    # editor with a '$%$%$%%$' marker inserted at the candidate position.
    predicted = clf.predict(X)
    probs = clf.predict_proba(X)
    for i, file in enumerate(file_history):
        if predicted[i] == y[i]:
            continue
        file_text = open(f'../Data/10-Q Sample//{file}').read()
        m = indices_history[i]
        output_file = open('IsThisTheSection.txt', 'w')
        file_text = f'{file_text[:m]}$%$%$%%${file_text[m:]}'
        modified_file_text = file_text[m - 600: m + 600]
        output_file.write(modified_file_text)
        output_file.close()
        os.startfile('IsThisTheSection.txt')
        print(f'Predicted: {predicted[i]}\nActual : {y[i]}\nProbability: {probs[i]}')
        print(X[i][-5:])
        input('Continue?')
def testing_open_model(file_name, wv, clf, use_this_path=None):
    """Interactively inspect the open-boundary classifier on one filing.

    Opens the filing in a text editor, scores every 'management'/'item'
    occurrence with *clf*, then prints the highest-probability open location
    and the 500 characters that follow it.

    :param file_name: name of the filing file.
    :param wv: word-vector lookup used for features.
    :param clf: trained open-boundary classifier.
    :param use_this_path: optional directory overriding the module-level PATH.
    """
    if use_this_path:
        file_text = open(f'{use_this_path}/{file_name}').read()
    else:
        file_text = open(f'{PATH}/{file_name}').read()
    probabilities = []
    indices = []
    management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
    item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    interested_locations = management_indices + item_indices
    # Show the raw filing so the printed prediction can be checked by eye.
    output_file = open('IsThisTheSection.txt', 'w')
    output_file.write(file_text)
    output_file.close()
    os.startfile('IsThisTheSection.txt')
    for p in interested_locations:
        # close_index = open_index + len(text)
        surrounding_text = file_text[p - 500: p + 500]
        a = file_text[p: p + 500]
        features = get_open_features(surrounding_text, file_text, wv, p)
        X = [features]
        # Probability of the positive (is-open-boundary) class.
        predicted_prob = clf.predict_proba(X)[0][1]
        probabilities.append(predicted_prob)
        indices.append(p)
    index = probabilities.index(max(probabilities))
    index = indices[index]
    print(f'Max Probability: {max(probabilities)}')
    print(file_text[index: index + 500])
    input('Continue?')
def get_close_features(surrounding_text, file_text, wv, index):
    """Build the v1 feature vector for a candidate MD&A *close* boundary.

    Same min/max embedding features as :func:`get_open_features`, plus two
    document-level flags: whether a 'quantitative...qualitative' section and a
    'controls...procedures' section exist anywhere in the filing.

    :return: flat list of 409 numeric features.
    """
    # Test: 0.93/0.9
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 30)
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    # Document-level hints: the sections that typically follow the MD&A.
    isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
    isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
    for word in leading_words:
        try:
            embedding = wv[word]
        except KeyError:
            # Out-of-vocabulary word: skip it.
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    for word in trailing_words:
        try:
            embedding = wv[word]
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    # item_count = len(surrounding_text.lower().split('item'))
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text), isQuantitativeInDocument, isControlsInDocument
    ]
    return features
def get_close_features_2(surrounding_text, file_text, wv, index):
    """Build the v2 close-boundary feature vector (v1 + mean embeddings).

    Extends :func:`get_close_features` with the element-wise average of the
    leading and trailing word embeddings (200 extra features).
    """
    #Test: 0.93/0.94
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 30)
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
    isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
    average_leading = [0] * 100
    leading_counter = 0
    for word in leading_words:
        try:
            embedding = wv[word]
            leading_counter += 1
            average_leading = np.add(average_leading, embedding)
        except KeyError:
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    # NOTE(review): divides without a zero guard — all-OOV leading windows
    # would divide by zero here (the trailing side below is guarded).
    average_leading = np.divide(average_leading, leading_counter)
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    average_trailing = [0] * 100
    trailing_counter = 0
    for word in trailing_words:
        try:
            embedding = wv[word]
            trailing_counter += 1
            average_trailing = np.add(average_trailing, embedding)
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    if trailing_counter != 0:
        average_trailing = np.divide(average_trailing, trailing_counter)
    # item_count = len(surrounding_text.lower().split('item'))
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text), isQuantitativeInDocument, isControlsInDocument
    ] + list(average_leading) + list(average_trailing)
    return features
def get_close_features_3(surrounding_text, file_text, wv, index):
    """Build the v3 close-boundary feature vector (current production version).

    Identical to :func:`get_close_features_2` except it examines 40 trailing
    words instead of 30. This is the variant used by ``create_close_model``
    and the extraction pipeline's ``get_mda``.
    """
    #Test: 0.91/0.96
    leading_characters = surrounding_text[:500]
    trailing_characters = surrounding_text[500:]
    leading_characters = modify_formatting(leading_characters)
    trailing_characters = modify_formatting(trailing_characters)
    leading_words = get_n_surrounding_words(leading_characters, 30, trailing=False)
    trailing_words = get_n_surrounding_words(trailing_characters, 40)
    min_feature_leading = [0] * 100
    max_feature_leading = [0] * 100
    isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
    isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
    average_leading = [0] * 100
    leading_counter = 0
    for word in leading_words:
        try:
            embedding = wv[word]
            leading_counter += 1
            average_leading = np.add(average_leading, embedding)
        except KeyError:
            continue
        for i in range(100):
            if embedding[i] > max_feature_leading[i]:
                max_feature_leading[i] = embedding[i]
            if embedding[i] < min_feature_leading[i]:
                min_feature_leading[i] = embedding[i]
    # NOTE(review): no zero guard here — all-OOV leading windows would divide
    # by zero (the trailing side below is guarded).
    average_leading = np.divide(average_leading, leading_counter)
    min_feature_trailing = [0] * 100
    max_feature_trailing = [0] * 100
    average_trailing = [0] * 100
    trailing_counter = 0
    for word in trailing_words:
        try:
            embedding = wv[word]
            trailing_counter += 1
            average_trailing = np.add(average_trailing, embedding)
        except KeyError:
            # print(word)
            continue
        for i in range(100):
            if embedding[i] > max_feature_trailing[i]:
                max_feature_trailing[i] = embedding[i]
            if embedding[i] < min_feature_trailing[i]:
                min_feature_trailing[i] = embedding[i]
    if trailing_counter != 0:
        average_trailing = np.divide(average_trailing, trailing_counter)
    # item_count = len(surrounding_text.lower().split('item'))
    leading_item_count = len(file_text[index - 500: index].lower().split('item'))
    trailing_item_count = len(file_text[index: index + 500].lower().split('item'))
    leading_newline_count = len(file_text[index - 500: index].lower().split('\n\n'))
    trailing_newline_count = len(file_text[index: index + 500].lower().split('\n\n'))
    features = min_feature_leading + max_feature_leading + min_feature_trailing + max_feature_trailing + [
        leading_item_count, trailing_item_count, leading_newline_count, trailing_newline_count,
        leading_item_count * leading_newline_count, trailing_item_count * trailing_newline_count,
        index / len(file_text), isQuantitativeInDocument, isControlsInDocument
    ] + list(average_leading) + list(average_trailing)
    return features
def create_close_model(wv, df_extracted):
    """Train and persist a random-forest classifier for the MD&A close boundary.

    Candidate close positions are newline offsets (within the labelled MD&A
    span plus 700 chars) followed by 'item'/'quantitative'/'control'/'report'
    or near the end of the file. The candidate nearest after the true close
    index is the positive class; features come from
    :func:`get_close_features_3`. A grid-searched RandomForest is fitted,
    confusion matrices are plotted, and the model is pickled.

    :param wv: word-vector lookup used for features.
    :param df_extracted: DataFrame with 'file' names and labelled 'mda' text
                         ('-9' marks filings with no MD&A).
    """
    files = list(df_extracted['file'])
    mda = list(df_extracted['mda'])
    X = []
    y = []
    indices_history = []
    file_history = []
    for m, file_name in enumerate(files):
        # Labels store newlines escaped; restore and trim before searching.
        text = mda[m].replace('\\n', '\n').rstrip()
        if text == '-9':
            continue
        file_text = open(f'{PATH}/{file_name}').read()
        close_index = file_text.index(text) + len(text)
        open_index = file_text.index(text)
        newline_indices = [m.start() for m in re.finditer('\n', file_text[open_index: open_index + len(text) + 700], re.IGNORECASE)]
        interested_locations = []
        for n in newline_indices:
            questionable_text = file_text[n + open_index: n + open_index + 500].lower()
            if 'item' in questionable_text:
                interested_locations.append(n + open_index)
            elif 'quantitative' in questionable_text:
                interested_locations.append(n + open_index)
            elif 'control' in questionable_text:
                interested_locations.append(n + open_index)
            elif n > len(file_text[open_index:]) - 1000:
                interested_locations.append(n + open_index)
            elif 'report' in questionable_text:
                interested_locations.append(n + open_index)
        # Positive label: the first candidate at or after the true close.
        closest_index_distance = 999
        closest_index = 0
        for i in interested_locations:
            if (i > close_index) and (i - close_index < closest_index_distance):
                closest_index_distance = i - close_index
                closest_index = i
                break
        # If no candidate lands near the labelled close, inspect by hand.
        if closest_index_distance > 30:
            print(file_name)
            output_file = open('IsThisTheSection.txt', 'w')
            output_file.write(file_text)
            output_file.close()
            os.startfile('IsThisTheSection.txt')
            input('Continue???')
        for p in interested_locations:
            surrounding_text = file_text[p - 500: p + 500]
            features = get_close_features_3(surrounding_text, file_text, wv, p)
            X.append(features)
            y.append(1 if p == closest_index else 0)
            indices_history.append(p)
            file_history.append(file_name)
        print(closest_index_distance)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    parameters = {'max_depth': [4, 5, 6],
                  'class_weight': ['balanced_subsample']}
    rf = RandomForestClassifier()
    # Recall-oriented: missing the true boundary is worse than extra candidates.
    clf = GridSearchCV(rf, parameters, scoring='recall', n_jobs=4)
    clf.fit(X_train, y_train)
    print(f'Best Score: {clf.best_score_}')
    print(f'Best Params: {clf.best_params_}')
    #
    plot_confusion_matrix(clf, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(clf, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()
    # explainer = TreeExplainer(clf)
    # shap_values = explainer.shap_values(np.array(X))
    # summary_plot(shap_values, X)
    with open('../rf_quarterly_close_wv.pkl', 'wb') as f:
        pickle.dump(clf, f)
    # predicted = clf.predict(X)
    # probs = clf.predict_proba(X)
    # for i, file in enumerate(file_history):
    #
    #
    #     if predicted[i] == y[i]:
    #         continue
    #     file_text = open(f'../Data/10-Q Sample//{file}').read()
    #     m = indices_history[i]
    #
    #     output_file = open('IsThisTheSection.txt', 'w')
    #     file_text = f'{file_text[:m]}$%$%$%%${file_text[m:]}'
    #     modified_file_text = file_text[m - 600: m + 600]
    #     output_file.write(modified_file_text)
    #     output_file.close()
    #
    #     os.startfile('IsThisTheSection.txt')
    #
    #     print(f'Predicted: {predicted[i]}\nActual : {y[i]}\nProbability: {probs[i]}')
    #     print(X[i][-5:])
    #     input('Continue?')
def testing_close_model(file_name, wv, clf, use_this_path=None):
    """Interactively inspect the close-boundary classifier on one filing.

    Opens the filing in a text editor, scores every 'management'/'item'
    occurrence with *clf*, then prints the highest-probability close location
    and the 500 characters that follow it.

    :param file_name: name of the filing file.
    :param wv: word-vector lookup used for features.
    :param clf: trained close-boundary classifier.
    :param use_this_path: optional directory overriding the module-level PATH.
    """
    if use_this_path:
        file_text = open(f'{use_this_path}/{file_name}').read()
    else:
        file_text = open(f'{PATH}/{file_name}').read()
    probabilities = []
    indices = []
    management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
    item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    interested_locations = management_indices + item_indices
    # Show the raw filing so the printed prediction can be checked by eye.
    output_file = open('IsThisTheSection.txt', 'w')
    output_file.write(file_text)
    output_file.close()
    os.startfile('IsThisTheSection.txt')
    for p in interested_locations:
        surrounding_text = file_text[p - 500: p + 500]
        # BUG FIX: the close classifier is trained on get_close_features_3
        # vectors (see create_close_model); the original called
        # get_open_features here, producing a feature vector of a different
        # length than the classifier expects.
        features = get_close_features_3(surrounding_text, file_text, wv, p)
        X = [features]
        # Probability of the positive (is-close-boundary) class.
        predicted_prob = clf.predict_proba(X)[0][1]
        probabilities.append(predicted_prob)
        indices.append(p)
    index = probabilities.index(max(probabilities))
    index = indices[index]
    print(f'Max Probability: {max(probabilities)}')
    print(file_text[index: index + 500])
    input('Continue?')
def get_mda(file_text, clf_open, clf_close, wv, add_extra = False):
    """Extract the MD&A section from a 10-Q filing using two classifiers.

    Stage 1 picks the opening boundary: every 'management'/'item' occurrence
    is scored with clf_open and the highest-probability position wins.
    Stage 2 picks the closing boundary among newline positions after the
    opening whose following 500 chars mention item/quantitative/control/
    report (or that sit within 1000 chars of EOF); the first candidate with
    clf_close probability > 0.7 wins, otherwise the overall maximum.

    Args:
        file_text: full text of the filing.
        clf_open / clf_close: fitted classifiers exposing predict_proba.
        wv: word vectors consumed by the feature builders.
        add_extra: include 200 extra trailing characters in the slice.

    Returns:
        The extracted MD&A substring of file_text.

    Raises:
        ValueError (via max) if no candidate anchors are found at all.
    """
    open_probabilities = []
    open_indices = []
    # Candidate opening anchors: every 'management' and 'item' occurrence.
    management_indices = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
    item_indices = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    interested_locations = management_indices + item_indices
    for p in interested_locations:
        surrounding_text = file_text[p - 500: p + 500]
        features = get_open_features(surrounding_text, file_text, wv, p)
        X = [features]
        predicted_prob = clf_open.predict_proba(X)[0][1]
        open_probabilities.append(predicted_prob)
        open_indices.append(p)
    # Opening boundary = most probable candidate.
    index = open_probabilities.index(max(open_probabilities))
    open_index = open_indices[index]
    close_probabilities = []
    close_indices = []
    # Candidate closing anchors: newlines after the opening boundary...
    newline_indices = [m.start() for m in
                       re.finditer('\n', file_text[open_index:], re.IGNORECASE)]
    interested_locations = []
    for n in newline_indices:
        # ...kept only if the next 500 chars look like a following section
        # heading, or the newline is within 1000 chars of the end of file.
        questionable_text = file_text[n + open_index: n + open_index + 500].lower()
        if 'item' in questionable_text:
            interested_locations.append(n + open_index)
        elif 'quantitative' in questionable_text:
            interested_locations.append(n + open_index)
        elif 'control' in questionable_text:
            interested_locations.append(n + open_index)
        elif n > len(file_text[open_index:]) - 1000:
            interested_locations.append(n + open_index)
        elif 'report' in questionable_text:
            interested_locations.append(n + open_index)
    found = False
    for p in interested_locations:
        surrounding_text = file_text[p - 500: p + 500]
        features = get_close_features_3(surrounding_text, file_text, wv, p)
        X = [features]
        predicted_prob = clf_close.predict_proba(X)[0][1]
        # Early exit: accept the first candidate above the 0.7 threshold.
        if predicted_prob > 0.7:
            close_index = p
            found = True
            break
        close_indices.append(p)
        close_probabilities.append(predicted_prob)
    if not found:
        # Fall back to the most probable candidate overall.
        index = close_probabilities.index(max(close_probabilities))
        close_index = close_indices[index]
    if add_extra:
        return file_text[open_index: close_index + 200]
    else:
        return file_text[open_index: close_index]
def mda_test():
    """Smoke-test end-to-end MD&A extraction on a fixed set of 10-Q filings.

    Runs get_mda() on each test filing and opens the extracted section in
    the default text viewer (Windows-only os.startfile) for manual review.

    Relies on the module-level `wv` KeyedVectors bound in the __main__ block.
    """
    # Load the pickled open/close models once; the original re-read both
    # pickle files inside every loop iteration.
    clf_open = pickle.load(open('../rf_quarterly_open_wv.pkl', 'rb'))
    clf_close = pickle.load(open('../rf_quarterly_close_wv.pkl', 'rb'))
    # (directory, file names) groups — same filings, same order as before.
    test_groups = [
        ('D:/SEC Filing Data/10-X_C_1993-2000/1997/QTR4',
         ['19971002_10-Q_edgar_data_49146_0000950116-97-001820_1.txt',
          '19971002_10-Q_edgar_data_835909_0001037979-97-000009_1.txt',
          '19971002_10-Q_edgar_data_940944_0000940944-97-000117_1.txt']),
        (PATH,
         ['20060202_10-Q_edgar_data_775158_0000897069-06-000275_1.txt',
          '19970331_10-Q_edgar_data_62262_0001017062-97-000588_1.txt',
          '19970331_10-Q_edgar_data_74154_0000950134-97-002523_1.txt']),
        ('D:/SEC Filing Data/10-X_C_2001-2005/2001/QTR1',
         ['20010103_10-Q_edgar_data_70033_0000931763-01-000004_1.txt',
          '20010103_10-Q_edgar_data_723254_0000892251-01-000001_1.txt',
          '20010103_10-Q_edgar_data_799511_0001010412-01-500003_1.txt',
          '20010103_10-Q_edgar_data_949301_0000950153-01-000004_1.txt']),
    ]
    for directory, file_names in test_groups:
        for t in file_names:
            file_text_test = open(f'{directory}/{t}').read()
            mda = get_mda(file_text_test, clf_open, clf_close, wv, add_extra=True)
            output_file = open('IsThisTheSection.txt', 'w')
            output_file.write(mda)
            output_file.close()
            os.startfile('IsThisTheSection.txt')
if __name__ == "__main__":
    # Entry point: load the pre-trained word vectors and run the manual
    # extraction smoke test. The commented calls below are the one-off
    # training / labeling steps that produced the pickled models.
    # create_vector_model()
    wv = KeyedVectors.load("mda.wordvectors", mmap='r')
    # create_open_model(wv)
    # df = pd.read_csv('NewExtractionSupervised.csv', header=None, names=['file', 'mda'])
    # create_open_model(wv, df)
    # clf = pickle.load(open('../rf_quarterly_open_wv.pkl', 'rb'))
    # testing('20060202_10-Q_edgar_data_775158_0000897069-06-000275_1.txt', wv, clf)
    # testing('19970331_10-Q_edgar_data_62262_0001017062-97-000588_1.txt', wv, clf)
    # testing('19970331_10-Q_edgar_data_74154_0000950134-97-002523_1.txt', wv, clf)
    # testing('19971002_10-Q_edgar_data_49146_0000950116-97-001820_1.txt', wv, clf, use_this_path='D:/SEC Filing Data/10-X_C_1993-2000/1997/QTR4')
    # testing('19971002_10-Q_edgar_data_835909_0001037979-97-000009_1.txt', wv, clf, use_this_path='D:/SEC Filing Data/10-X_C_1993-2000/1997/QTR4')
    # testing('19971002_10-Q_edgar_data_940944_0000940944-97-000117_1.txt', wv, clf, use_this_path='D:/SEC Filing Data/10-X_C_1993-2000/1997/QTR4')
    # testing_open_model('19971003_10-Q_edgar_data_51410_0000051410-97-000029_1.txt', wv, clf, use_this_path='D:/SEC Filing Data/10-X_C_1993-2000/1997/QTR4')
    # df = pd.read_csv('NewExtractionSupervised.csv', header=None, names=['file', 'mda'])
    # create_close_model(wv, df)
    mda_test()
<file_sep>import os
import pickle
import re
import csv
import pandas as pd
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import plot_confusion_matrix, confusion_matrix, f1_score, recall_score
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import shap
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
import seaborn as sns
# from gensim.models import word2vec, Word2Vec
import tensorflow as tf
import datetime
# https://www.sec.gov/files/form10-q.pdf
# Location of the sample 10-Q filings used for training/labeling.
PATH = '../../Data/10-Q Sample'
# Rough regex for the MD&A span: from the "Item 2." heading up to "Item 3.".
REGEX_10Q = r"(Item[\s]+?2\.[\s\S]*?)(Item[\s]+?3\.)"
# Feature column orders expected by the pickled opening/closing classifiers below.
OPEN_INDEPENDENT_VARIABLES = ['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline',
                              'leading_newline', 'total_size', 'regex_open', 'trailing_analysis']
CLOSE_INDEPENDENT_VARIABLES = ['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size',
                               'leading_tab', 'leading_spaces', 'regex_close', 'trailing_quantitative']
# NOTE: loaded at import time as a module-level side effect — importing this
# module fails if the pickle files are missing.
OPEN_CLASSIFIER = pickle.load(open('../open_quarterly_random_forest.pkl', 'rb'))
CLOSE_CLASSIFIER = pickle.load(open('../close_quarterly_random_forest.pkl', 'rb'))
def get_mda(clf_open, clf_close, file_text):
    """Extract the MD&A section from a 10-Q using hand-built 'item' features.

    Scores every 'item' occurrence (that is followed within 80 chars by
    management/analysis/quantitative) with both the opening and closing
    classifiers and slices the text between the best opening and best
    closing anchors.

    Returns:
        Either '' (no candidates, or opening after closing) or a 3-tuple of
        (mda_text_with_escaped_newlines, open_item_ordinal, close_item_ordinal).
        NOTE(review): the mixed return types are relied on by callers via
        `except IndexError` on subscripting — confirm before changing.
    """
    items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
    total_text_length = len(file_text)
    # Regex-based fallback match for the whole MD&A span (may be empty).
    match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
    open_probabilities = []
    open_indices = []
    close_probabilities = []
    close_indices = []
    for index, item in enumerate(items):
        trailing_80 = file_text[item: item + 80].lower()
        trailing_management = 1 if 'management' in trailing_80 else 0
        trailing_analysis = 1 if 'analysis' in trailing_80 else 0
        trailing_quantitative = 1 if 'quantitative' in trailing_80 else 0
        # Skip 'item' occurrences that are clearly not section headings.
        if sum([trailing_management, trailing_analysis, trailing_quantitative]) < 1:
            continue
        trailing_50 = file_text[item: item + 50].lower()  # currently unused
        trailing_100 = file_text[item: item + 100].lower()
        trailing_20 = file_text[item: item + 20].lower()
        # Binary lexical cues in the surrounding windows.
        trailing_period = 1 if '.' in trailing_20 else 0
        trailing_2 = 1 if '2' in trailing_20 else 0
        trailing_3 = 1 if '3' in trailing_20 else 0
        trailing_newline = 1 if '\n' in trailing_100 else 0
        leading_newline = 1 if '\n' in file_text[item - 5: item] else 0
        leading_spaces = 1 if ' ' in file_text[item - 5: item] else 0
        leading_tab = 1 if '\t' in file_text[item - 5: item] else 0
        # Does this anchor coincide with the start/end of the regex match?
        regex_open = 0
        regex_close = 0
        try:
            mda = match[-1][0]
            if file_text[item: item + len(mda)] == mda:
                regex_open = 1
        except IndexError:
            e = 1  # no regex match for this filing; deliberately ignored
        try:
            mda = match[-1][0]
            if file_text[item - len(mda): item] == mda:
                regex_close = 1
        except IndexError:
            e = 1  # no regex match for this filing; deliberately ignored
        data = {
            'position': item / total_text_length,
            'trailing_management': trailing_management,
            'trailing_period': trailing_period,
            'trailing_2': trailing_2,
            'trailing_newline': trailing_newline,
            'leading_newline': leading_newline,
            'total_size': total_text_length,
            'regex_open': regex_open,
            'trailing_analysis': trailing_analysis,
            'trailing_3': trailing_3,
            'leading_tab': leading_tab,
            'leading_spaces': leading_spaces,
            'regex_close': regex_close,
            'trailing_quantitative': trailing_quantitative
        }
        # Project the shared feature dict onto each model's column order.
        open_features = []
        for f in OPEN_INDEPENDENT_VARIABLES:
            open_features.append(data[f])
        close_features = []
        for f in CLOSE_INDEPENDENT_VARIABLES:
            close_features.append(data[f])
        # NOTE(review): predict() returns hard 0/1 labels, not probabilities —
        # .index(max(...)) below therefore picks the *first* positive anchor.
        open_probability = clf_open.predict([open_features])
        close_probability = clf_close.predict([close_features])
        open_probabilities.append(open_probability)
        close_probabilities.append(close_probability)
        open_indices.append(item)
        close_indices.append(item)
    try:
        open_index = open_probabilities.index(max(open_probabilities))
        close_index = close_probabilities.index(max(close_probabilities))
        # print('Open: ', open_index)
        # print('Close: ', close_index)
    except ValueError:
        return ''  # no qualifying 'item' anchors at all
    if open_index > close_index:
        return ''  # inconsistent boundaries — give up
    else:
        # return file_text[items[open_index]: items[close_index]]
        return file_text[open_indices[open_index]: (close_indices[close_index] + 200)].replace('\n', '\\n'), items.index(open_indices[open_index]), items.index(close_indices[close_index])
def get_supervised_mda(new_file_name):
    """Interactive labeling loop: confirm/correct predicted MD&A boundaries.

    For every filing under PATH not yet recorded in `new_file_name`, shows
    the predicted MD&A section (IsThisTheSection.txt) next to the filing
    annotated with 'item $%$ N $%$' ordinals (FileInQuestion.txt), prompts
    for the correct open/close item ordinals (empty input keeps the
    prediction), and appends `file,open,close` to `new_file_name`.

    Windows-only (os.startfile); blocks on input() per file.
    """
    # already_done = []
    # Crude resume support: skip any file name already present in the CSV.
    already_done = open(new_file_name).read()
    _, _, file_names = next(
        os.walk(PATH))
    for file_name in file_names:
        if file_name in already_done:
            print('file already done!')
            continue
        file_text = open(f'{PATH}\\{file_name}').read()
        mda = get_mda(OPEN_CLASSIFIER, CLOSE_CLASSIFIER, file_text)
        output_file = open('IsThisTheSection.txt', 'w')
        try:
            # get_mda returns '' on failure; ''[0] raises IndexError.
            output_file.write(mda[0].replace('\\n', '\n'))
        except IndexError:
            output_file.write('')
        output_file.close()
        # Annotate every 'item' occurrence with its ordinal so the reviewer
        # can type the ordinal back in.
        modified_file_text = ''
        for index, s in enumerate(re.split(r'item', file_text, flags=re.IGNORECASE)):
            modified_file_text += f'{s} item $%$ {index} $%$'
        modified_file = open('FileInQuestion.txt', 'w')
        modified_file.write(modified_file_text.replace('\\n', '\n'))
        modified_file.close()
        os.startfile('FileInQuestion.txt')
        os.startfile('IsThisTheSection.txt')
        try:
            print(f'\n{file_name}\nOpening: {mda[1]}\nClosing: {mda[2]}')
        except IndexError:
            print(f'\n{file_name}\nOpening: Nothing!\nClosing: Nothing!')
        actual_open = input('What is the open index? :')
        actual_close = input('What is the close index? :')
        # Empty answer accepts the model's prediction.
        if actual_open == '':
            actual_open = mda[1]
        if actual_close == '':
            actual_close = mda[2]
        supervised_file = open(new_file_name, 'a')
        supervised_file.write(f'{file_name},{actual_open},{actual_close}\n')
        supervised_file.close()
def convert_item_to_management(file_name):
    """Re-anchor supervised opening labels from 'item' to 'management'.

    Reads `file_name` (file, open, close ordinals over 'item' matches),
    finds the first 'management' occurrence strictly after the labeled
    'item' position, and appends `file,management_ordinal` to
    supervisedManagement.csv (-9 when none exists). Rows labeled -9 are
    skipped.
    """
    labels = pd.read_csv(file_name, header=None, names=['file', 'open', 'close'])
    for doc_name, open_ordinal in zip(list(labels['file']), labels['open']):
        if open_ordinal == -9:
            continue  # unlabeled document
        text = open(f'../Data/10-Q Sample//{doc_name}').read()
        item_positions = [m.start() for m in re.finditer('item', text, re.IGNORECASE)]
        anchor = item_positions[open_ordinal]
        management_positions = [m.start() for m in re.finditer('management', text, re.IGNORECASE)]
        # First 'management' ordinal located after the labeled 'item'.
        management_index = next(
            (n for n, pos in enumerate(management_positions) if pos - anchor > 0),
            -9)
        with open('supervisedManagement.csv', 'a') as out:
            out.write(f'{doc_name},{management_index}\n')
def convert_item_to_new_closing(file_name):
    """Re-anchor supervised closing labels from 'item' onto the nearer of
    'quantitative' or 'controls'.

    For each labeled row, finds the first occurrence of each anchor word
    strictly after the labeled 'item' position and keeps whichever is
    closer. Appends `file,anchor_type,ordinal` to supervisedClosing.csv
    where anchor_type is 0 for 'quantitative', 1 for 'controls', and
    None/-9-style values are written when neither word follows the label.
    Rows labeled -9 are skipped.
    """
    labels = pd.read_csv(file_name, header=None, names=['file', 'open', 'close'])
    for doc_name, close_ordinal in zip(list(labels['file']), labels['close']):
        if close_ordinal == -9:
            continue  # unlabeled document
        text = open(f'../Data/10-Q Sample//{doc_name}').read()
        item_positions = [m.start() for m in re.finditer('item', text, re.IGNORECASE)]
        anchor = item_positions[close_ordinal]

        def first_after(word):
            # (ordinal, distance) of the first `word` match past the anchor,
            # or (-9, None) when there is none.
            for n, pos in enumerate(m.start() for m in re.finditer(word, text, re.IGNORECASE)):
                gap = pos - anchor
                if gap > 0:
                    return n, gap
            return -9, None

        quantitative_index, quantitative_distance = first_after('quantitative')
        control_index, control_distance = first_after('controls')
        quantitativeOrControl = None
        index = None
        if (quantitative_index == -9) and (control_index == -9):
            pass  # neither anchor word follows the label
        elif quantitative_index == -9:
            quantitativeOrControl, index = 1, control_index
        elif control_index == -9:
            quantitativeOrControl, index = 0, quantitative_index
        elif quantitative_distance < control_distance:
            quantitativeOrControl, index = 0, quantitative_index
        else:
            quantitativeOrControl, index = 1, control_index
        with open('supervisedClosing.csv', 'a') as out:
            out.write(f'{doc_name},{quantitativeOrControl},{index}\n')
def threshold_predict(classifier, X, threshold):
    """Binary predictions at a custom decision threshold.

    Args:
        classifier: fitted estimator exposing predict_proba.
        X: feature matrix accepted by the classifier.
        threshold: minimum positive-class probability to predict 1.

    Returns:
        List of 0/1 ints, one per row of X.
    """
    positive_probs = (row[1] for row in classifier.predict_proba(X))
    return [int(p >= threshold) for p in positive_probs]
def create_feature_space_opening_model():
    """Build the labeled feature table for the MD&A *opening* classifier.

    Reads supervisedManagement.csv (file, open, close), and for every
    'management' occurrence in each filing emits one row of lexical and
    positional features, with open_target = 1 on the labeled occurrence.
    Writes the table to supervisedManagement_open.csv.
    """
    columns = ['file', 'open_target', 'position', 'text_size', 'trailing_analysis',
               'trailing_discussion', 'leading_item', 'leading_newline_count', 'leading_item_count',
               'n_management', 'trailing_uppercase', 'next_newline_distance', 'trailing_newline_count',
               'trailing_continue', 'leading_2', 'trailing_item_count']
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # collect plain dicts and build the frame once (also O(n), not O(n^2)).
    rows = []
    df = pd.read_csv('supervisedManagement.csv', header=None, names=['file', 'open', 'close'])
    files = list(df['file'])
    open_indices = df['open']
    for file_i, file in enumerate(files):
        file_text = open(f'../Data/10-Q Sample//{file}').read()
        managements = [m.start() for m in re.finditer('management', file_text, re.IGNORECASE)]
        y_open_index = open_indices[file_i]
        total_text_length = len(file_text)
        for i, index in enumerate(managements):
            # Context windows around this 'management' occurrence.
            trailing_text = file_text[index:index + 100].lower()
            trailing_text_large = file_text[index:index + 500].lower()
            leading_text_small_window = file_text[index - 40:index].lower()
            leading_text_medium_window = file_text[index - 100:index].lower()
            leading_text_large_window = file_text[index - 500:index].lower()
            # Binary lexical cues.
            trailing_discussion = 1 if 'discussion' in trailing_text else 0
            trailing_continue = 1 if 'continue' in trailing_text_large else 0
            trailing_analysis = 1 if 'analysis' in trailing_text else 0
            leading_item = 1 if 'item' in leading_text_medium_window else 0
            leading_2 = 1 if '2' in leading_text_medium_window else 0
            # Structural counts (split yields pieces = occurrences + 1).
            leading_newline_count = len(leading_text_small_window.split('\n'))
            trailing_newline_count = len(trailing_text.split('\n'))
            leading_item_count = len(leading_text_large_window.split('item'))
            trailing_item_count = len(trailing_text_large.split('item'))
            trailing_uppercase = 1 if file_text[index:index + 50].isupper() else 0
            next_newline_distance = 0
            try:
                next_newline_distance = trailing_text.index('\n')
            except ValueError:
                pass  # no newline within the next 100 chars
            open_target = 1 if y_open_index == i else 0
            rows.append({
                'file': file,
                'open_target': open_target,
                'position': index / total_text_length,
                'text_size': total_text_length,
                'trailing_analysis': trailing_analysis,
                'trailing_discussion': trailing_discussion,
                'leading_item': leading_item,
                'leading_newline_count': leading_newline_count,
                'leading_item_count': leading_item_count,
                'n_management': i,
                'trailing_uppercase': trailing_uppercase,
                'next_newline_distance': next_newline_distance,
                'trailing_newline_count': trailing_newline_count,
                'leading_2': leading_2,
                'trailing_continue': trailing_continue,
                'trailing_item_count': trailing_item_count
            })
    data = pd.DataFrame(rows, columns=columns)
    data.to_csv('supervisedManagement_open.csv')
def _closing_feature_row(file_text, index, i, total_text_length):
    """Per-occurrence lexical/positional features shared by both closing
    anchor words ('quantitative' and 'controls')."""
    # Context windows around the anchor occurrence.
    trailing_text = file_text[index:index + 100].lower()
    trailing_text_large = file_text[index:index + 500].lower()
    leading_text_small_window = file_text[index - 40:index].lower()
    leading_text_medium_window = file_text[index - 100:index].lower()
    leading_text_large_window = file_text[index - 500:index].lower()
    try:
        trailing_word_count = len(trailing_text_large.split('\n\n')[0].split(' '))
    except IndexError:
        trailing_word_count = 50
    try:
        leading_word_count = len(leading_text_medium_window.split('\n\n')[0].split(' '))
    except IndexError:
        leading_word_count = 50
    # Distances default to the window size when no newline is found.
    next_newline_distance = 100
    try:
        next_newline_distance = trailing_text.index('\n')
    except ValueError:
        pass
    next_double_newline_distance = 500
    try:
        next_double_newline_distance = trailing_text_large.index('\n\n')
    except ValueError:
        pass
    return {
        'position': index / total_text_length,
        'text_size': total_text_length,
        'trailing_qualitative': 1 if 'qualitative' in trailing_text else 0,
        'trailing_procedures': 1 if 'procedures' in trailing_text else 0,
        'leading_item': 1 if 'item' in leading_text_small_window else 0,
        'leading_newline_count': len(leading_text_small_window.split('\n')),
        'leading_item_count': len(leading_text_large_window.split('item')),
        'n_index': i,
        'trailing_uppercase': 1 if file_text[index:index + 50].isupper() else 0,
        'next_newline_distance': next_newline_distance,
        'trailing_newline_count': len(trailing_text.split('\n')),
        'leading_3': 1 if '3' in leading_text_small_window else 0,
        'leading_4': 1 if '4' in leading_text_small_window else 0,
        'trailing_continue': 1 if 'continue' in trailing_text_large else 0,
        'trailing_item_count': len(trailing_text_large.split('item')),
        'trailing_word_count': trailing_word_count,
        'leading_word_count': leading_word_count,
        'next_double_newline_distance': next_double_newline_distance,
    }
def create_feature_space_closing_model():
    """Build the labeled feature table for the MD&A *closing* classifier.

    Reads supervisedClosing.csv (file, type, index) where type 0 means the
    label anchors on 'quantitative' and 1 on 'controls'. For every
    occurrence of either anchor word emits one feature row, with
    close_target = 1 on the labeled occurrence of the labeled anchor type.
    Writes the table to supervised_Close.csv.
    """
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # collect plain dicts and build the frame once at the end.
    rows = []
    df = pd.read_csv('supervisedClosing.csv', header=None, names=['file', 'type', 'index'])
    files = list(df['file'])
    close_indices = df['index']
    types = df['type']
    for file_i, file in enumerate(files):
        file_text = open(f'../Data/10-Q Sample//{file}').read()
        quantitatives = [m.start() for m in re.finditer('quantitative', file_text, re.IGNORECASE)]
        controls = [m.start() for m in re.finditer('controls', file_text, re.IGNORECASE)]
        y_close_index = close_indices[file_i]
        y_type = types[file_i]
        # Document-level flags: does the filing contain the Item 3 / Item 4
        # heading phrases at all?  (fix: the original read `> 0else 0`, an
        # invalid decimal literal on modern CPython)
        isQuantitativeInDocument = 1 if len(re.findall(r'quantitative[\s\S]+qualitative', file_text, re.IGNORECASE)) > 0 else 0
        isControlsInDocument = 1 if len(re.findall(r'controls[\s\S]+procedures', file_text, re.IGNORECASE)) > 0 else 0
        total_text_length = len(file_text)
        # The two anchor-word loops were duplicated verbatim in the original;
        # the shared feature computation now lives in _closing_feature_row.
        for anchor_type, anchor_positions in ((0, quantitatives), (1, controls)):
            for i, index in enumerate(anchor_positions):
                row = _closing_feature_row(file_text, index, i, total_text_length)
                row['file'] = file
                row['type'] = y_type
                row['close_target'] = 1 if (int(y_close_index) == i) and (int(y_type) == anchor_type) else 0
                row['isQuantitativeInDocument'] = isQuantitativeInDocument
                row['isControlsInDocument'] = isControlsInDocument
                rows.append(row)
    data = pd.DataFrame(rows)
    data.to_csv('supervised_Close.csv')
def testing():
    """Eyeball each supervised opening label: writes +/-context around the
    labeled 'management' occurrence to IsThisTheSection.txt and opens it
    (Windows-only os.startfile), pausing on input() between files."""
    labels = pd.read_csv('supervisedManagement.csv', header=None, names=['file', 'open', 'close'])
    for doc_name, open_ordinal in zip(list(labels['file']), labels['open']):
        text = open(f'../Data/10-Q Sample//{doc_name}').read()
        positions = [m.start() for m in re.finditer('management', text, re.IGNORECASE)]
        anchor = positions[open_ordinal]
        with open('IsThisTheSection.txt', 'w') as out:
            out.write(text[anchor - 50: anchor + 300])
        os.startfile('IsThisTheSection.txt')
        input()
def testing2():
    """Eyeball each supervised closing label: writes +/-context around the
    labeled anchor word (type 0 = 'quantitative', type 1 = 'controls') to
    IsThisTheSection.txt and opens it, pausing on input() between files."""
    labels = pd.read_csv('supervisedClosing.csv', header=None, names=['file', 'type', 'index'])
    for doc_name, anchor_type, anchor_ordinal in zip(list(labels['file']), labels['type'], labels['index']):
        text = open(f'../Data/10-Q Sample//{doc_name}').read()
        pattern = 'quantitative' if int(anchor_type) == 0 else 'controls'
        positions = [m.start() for m in re.finditer(pattern, text, re.IGNORECASE)]
        anchor = positions[int(anchor_ordinal)]
        with open('IsThisTheSection.txt', 'w') as out:
            out.write(text[anchor - 50: anchor + 300])
        os.startfile('IsThisTheSection.txt')
        input()
def train_open():
    """Grid-search a RandomForest over the opening-anchor feature table,
    plot train/test confusion matrices, and pickle the best estimator to
    ../rf_quarterly_open.pkl.

    Reads supervisedManagement_open.csv produced by
    create_feature_space_opening_model(); scoring optimizes recall because
    positive (true-boundary) rows are rare.
    """
    df = pd.read_csv('supervisedManagement_open.csv', index_col=0)
    features = ['position', 'text_size', 'trailing_analysis', 'trailing_discussion', 'leading_item',
                'leading_newline_count', 'leading_item_count', 'n_management', 'trailing_uppercase',
                'next_newline_distance', 'trailing_newline_count', 'leading_2', 'trailing_continue', 'trailing_item_count']
    X = df[features]
    y = df['open_target']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    parameters = {'max_depth': [4, 5, 6, 7],
                  'criterion': ('gini', 'entropy'),
                  'class_weight': ('balanced', 'balanced_subsample')}
    rf = RandomForestClassifier()
    clf = GridSearchCV(rf, parameters, scoring='recall')
    # Fit on the training split only: the original fit on the full X/y,
    # which leaks the held-out rows into the model and makes the test-set
    # confusion matrix below optimistic.
    clf.fit(X_train, y_train)
    print(f'Best Score: {clf.best_score_}')
    print(f'Best Params: {clf.best_params_}')
    clf = clf.best_estimator_
    plot_confusion_matrix(clf, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(clf, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()
    with open('../rf_quarterly_open.pkl', 'wb') as f:
        pickle.dump(clf, f)
def train_close():
    """Grid-search a RandomForest over the closing-anchor feature table,
    plot confusion matrices and SHAP summaries, pickle the best estimator,
    then interactively step through every misclassified row (opens each
    filing with the anchor marked; Windows-only os.startfile; blocks on
    input() per mistake).
    """
    df = pd.read_csv('supervised_Close.csv', index_col=0)
    features = ['position', 'text_size', 'trailing_qualitative', 'trailing_procedures', 'leading_item',
                'leading_newline_count', 'leading_item_count', 'n_index', 'trailing_uppercase',
                'next_newline_distance', 'trailing_newline_count', 'leading_3', 'leading_4',
                'trailing_continue', 'trailing_item_count', 'isQuantitativeInDocument', 'isControlsInDocument',
                'trailing_word_count', 'next_double_newline_distance', 'leading_word_count']
    X = df[features]
    y = df['close_target']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    parameters = {'max_depth': [4, 5, 6, 7],
                  'criterion': ('gini', 'entropy'),
                  'class_weight': ('balanced', 'balanced_subsample')}
    rf = RandomForestClassifier()
    clf = GridSearchCV(rf, parameters, scoring='recall')
    # NOTE(review): fits on the full X/y, so the X_test confusion matrix
    # below is evaluated on data the model has seen (leakage) — confirm
    # whether that was intended before trusting the test-set numbers.
    clf.fit(X, y)
    print(f'Best Score: {clf.best_score_}')
    print(f'Best Params: {clf.best_params_}')
    clf = clf.best_estimator_
    plot_confusion_matrix(clf, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(clf, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()
    with open('../rf_quarterly_close.pkl', 'wb') as f:
        pickle.dump(clf, f)
    # Feature-importance visualization via SHAP.
    explainer = shap.TreeExplainer(clf)
    shap_values = explainer.shap_values(X)
    shap.summary_plot(shap_values, X)
    # Manual error analysis: show every row the model got wrong.
    files = list(df['file'])
    types = list(df['type'])
    close_indices = df['close_target']
    predicted = clf.predict(X)
    probs = clf.predict_proba(X)
    m_indices = list(df['n_index'])
    for i, file in enumerate(files):
        if predicted[i] == close_indices[i]:
            continue
        file_text = open(f'../Data/10-Q Sample//{file}').read()
        # Re-locate the anchor word this row was built from.
        if types[i] == 0:
            managements = [m.start() for m in re.finditer('quantitative', file_text, re.IGNORECASE)]
        else:
            managements = [m.start() for m in re.finditer('controls', file_text, re.IGNORECASE)]
        m = managements[int(m_indices[i])]
        output_file = open('IsThisTheSection.txt', 'w')
        # Insert a visual marker at the anchor and show +/-600 chars of context.
        file_text = f'{file_text[:m]}$%$%$%%${file_text[m:]}'
        modified_file_text = file_text[m - 600: m + 600]
        output_file.write(modified_file_text)
        output_file.close()
        os.startfile('IsThisTheSection.txt')
        print(f'Predicted: {predicted[i]}\nActual   : {close_indices[i]}\nProbability: {probs[i]}')
        print(X.iloc[i])
        input('Continue?')
def neural_network_open():
    """Train a small dense Keras network on the opening-anchor feature
    table and plot normalized train/test confusion matrices.

    Uses class weighting (negative/positive ratio) to compensate for the
    rare positive class, standardized features, SGD + binary cross-entropy,
    and logs to TensorBoard under logs/fit/<timestamp>.
    """
    df = pd.read_csv('supervisedManagement_open.csv', index_col=0)
    # Class counts drive the class_weight passed to fit() below.
    negative_count = len(df[df['open_target'] == 0])
    positive_count = len(df[df['open_target'] == 1])
    features = ['position', 'text_size', 'trailing_analysis', 'trailing_discussion', 'leading_item',
                'leading_newline_count', 'leading_item_count', 'n_management', 'trailing_uppercase',
                'next_newline_distance', 'trailing_newline_count', 'leading_2', 'trailing_continue',
                'trailing_item_count']
    X = df[features]
    y = df['open_target']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    # Standardize: fit on the training split only, apply to both splits.
    scaler = StandardScaler()
    scaler.fit(X_train)
    X_train = scaler.transform(X_train)
    X_test = scaler.transform(X_test)
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(128 * 3, activation='relu', input_shape=(len(features),)),
        tf.keras.layers.Dense(64 * 1, activation='relu'),
        tf.keras.layers.Dense(1, activation='sigmoid')
    ])
    # NOTE(review): SGD(lr=...) is deprecated in newer TF (learning_rate=) —
    # confirm the pinned TensorFlow version.
    model.compile(optimizer=tf.keras.optimizers.SGD(lr=0.01),
                  loss=tf.keras.losses.BinaryCrossentropy())
    log_dir = f'logs/fit/{datetime.datetime.now().strftime("%Y%m%d-%H%M%S")}'
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    model.fit(x=X_train,
              y=y_train,
              epochs=300,
              validation_data=(X_test, y_test),
              callbacks=[tensorboard_callback],
              batch_size=15,
              class_weight={0: 1,
                            1: int(negative_count / positive_count)})
    # print(model.predict_classes(X_test))
    # Hard 0/1 predictions at the 0.5 sigmoid threshold.
    y_train_predicted = (model.predict(X_train) > 0.5).astype("int32")
    y_test_predicted = (model.predict(X_test) > 0.5).astype("int32")
    cm = confusion_matrix(y_true=y_train, y_pred=y_train_predicted, normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    cm = confusion_matrix(y_true=y_test, y_pred=y_test_predicted, normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    # confusion_matrix()
if __name__ == "__main__":
    # Pipeline entry point. The commented calls are the other one-off steps:
    # labeling, feature-table builds, and the alternative models.
    # get_supervised_mda('test.csv')
    # convert_item_to_management('test.csv')
    # create_feature_space_opening_model()
    train_open()
    # convert_item_to_new_closing('test.csv')
    # testing2()
    # create_feature_space_closing_model()
    # train_close()
    # neural_network_open()
<file_sep>from time import time
import matplotlib.pyplot as plt
import pandas as pd
from nltk.stem import WordNetLemmatizer
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF, LatentDirichletAllocation
import pathlib
import os
import re
import string
# from sklearn.datasets import fetch_20newsgroups
# Repository root: parent of the current working directory
# (Windows-style backslash path handling).
PATH = '\\'.join(str(pathlib.Path().absolute()).split('\\')[:-1])
def plot_top_words(model, feature_names, n_top_words, title):
    """Draw a 2x5 grid of horizontal bar charts, one per topic component,
    showing each topic's n_top_words strongest terms and their weights.

    Args:
        model: fitted decomposition model exposing components_ (e.g. NMF/LDA).
        feature_names: vectorizer vocabulary, indexed by component columns.
        n_top_words: number of terms to display per topic.
        title: figure-level suptitle.
    """
    fig, axis_grid = plt.subplots(2, 5, figsize=(60, 30), sharex=True)
    axis_list = axis_grid.flatten()
    for topic_number, component in enumerate(model.components_):
        # argsort ascending, then take the last n_top_words reversed ->
        # strongest terms first.
        strongest = component.argsort()[:-n_top_words - 1:-1]
        term_labels = [feature_names[term] for term in strongest]
        axis = axis_list[topic_number]
        axis.barh(term_labels, component[strongest], height=0.7)
        axis.set_title(f'Topic {topic_number +1}',
                       fontdict={'fontsize': 10})
        axis.invert_yaxis()
        axis.tick_params(axis='both', which='major', labelsize=10)
        for edge in 'top right left'.split():
            axis.spines[edge].set_visible(False)
    fig.suptitle(title, fontsize=40)
    plt.subplots_adjust(top=0.90, bottom=0.05, wspace=0.90, hspace=0.3)
    plt.show()
# Load the 20 newsgroups dataset and vectorize it. We use a few heuristics
# to filter out useless terms early on: the posts are stripped of headers,
# footers and quoted replies, and common English words, words occurring in
# only one document or in at least 95% of the documents are removed.
bankruptcy = pd.read_csv(f'{PATH}\\SEC_Analytics\\Analysis\\bankruptcy_cik.csv', index_col=0)
_, _, files = next(os.walk(f'{PATH}\\SEC_Analytics\\Analysis\\Extracted'))
mda = pd.DataFrame(columns=['date', 'cik', 'mda'])
for file in files:
extracted = pd.read_csv(f'{PATH}\\Analysis\\Extracted\\{file}', header=None, names=['path', 'date', 'cik', 'mda'])
mda = mda.append(extracted[['date', 'cik', 'mda']], ignore_index=True)
_, _, files = next(os.walk(f'{PATH}\\SEC_Analytics\\Analysis\\Extracted\\Quarterly'))
# mda = pd.DataFrame(columns=['date', 'cik', 'mda'])
for file in files:
extracted = pd.read_csv(f'{PATH}\\Analysis\\Extracted\\{file}', header=None, names=['path', 'date', 'cik', 'mda'])
mda = mda.append(extracted[['date', 'cik', 'mda']], ignore_index=True)
mda['merge_date'] = mda['date'].apply(lambda x: f'{str(x)[:4]}-{str(x)[4:6]}')
all = pd.merge(mda, bankruptcy, left_on=['merge_date', 'cik'], right_on=['date_month', 'cik'])
# print()
data_samples = all[all['bankruptcyWithin12Months'] == 1]
lemmatizer = WordNetLemmatizer()
financial_stopwords = {'million', 'year', 'fiscal', 'january', 'february', 'march', 'april', 'may', 'june', 'july',
'august', 'september', 'october', 'november', 'december'}
def apply_stuff(x):
    """Normalise one MD&A document for topic modelling.

    Strips digits and punctuation, deduplicates tokens, removes the
    month/finance boilerplate words, and lemmatises what is left.

    Parameters
    ----------
    x : str
        Raw MD&A text.

    Returns
    -------
    str
        Space-joined lemmatised tokens, or '' if lemmatisation fails.
    """
    x = re.sub(r'[0-9]+', '', x)
    x = x.translate(str.maketrans('', '', string.punctuation))
    # set() deduplicates tokens; word order is deliberately discarded (the
    # downstream vectorizers are bag-of-words anyway).
    x = set(str(x).split(' '))
    try:
        # BUG FIX: the original tested `str(x).lower()` -- the repr of the
        # entire token set -- against financial_stopwords, which is never a
        # member, so no stopword was ever removed. Test each token instead.
        return ' '.join([lemmatizer.lemmatize(w) for w in x if w.lower() not in financial_stopwords])
    except ValueError:
        return ''
# Keep only documents with text; undo the CSV escape markers ('\\n' for
# newlines, '$%$' for commas), then normalise each document via apply_stuff.
data_samples = data_samples[~data_samples['mda'].isna()]
data_samples['mda'] = data_samples['mda'].apply(lambda x: str(x).replace('\\n', '\n').replace('$%$', ','))
data_samples['mda'] = data_samples['mda'].apply(lambda x: apply_stuff(x))
data_samples = data_samples['mda']
# Model dimensions; n_samples only appears in the progress messages below.
n_samples = 1000
n_features = 2000
n_components = 10
n_top_words = 10
print("Loading dataset...")
# NOTE: leftover from the sklearn example -- the data is already loaded, so
# this timing is effectively zero.
t0 = time()
print("done in %0.3fs." % (time() - t0))
# Use tf-idf features for NMF.
print("Extracting tf-idf features for NMF...")
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2, max_features=n_features,
                                   stop_words='english', ngram_range=(1, 2))
t0 = time()
tfidf = tfidf_vectorizer.fit_transform(data_samples)
print("done in %0.3fs." % (time() - t0))
# Use tf (raw term count) features for LDA.
print("Extracting tf features for LDA...")
tf_vectorizer = CountVectorizer(max_df=0.95, min_df=2, max_features=n_features,
                                stop_words='english', ngram_range=(1, 2))
t0 = time()
tf = tf_vectorizer.fit_transform(data_samples)
print("done in %0.3fs." % (time() - t0))
print()
# Fit the NMF model (Frobenius-norm loss).
print("Fitting the NMF model (Frobenius norm) with tf-idf features, "
      "n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
t0 = time()
nmf = NMF(n_components=n_components, random_state=1,
          alpha=.1, l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
plot_top_words(nmf, tfidf_feature_names, n_top_words,
               'Topics in NMF model (Frobenius norm)')
# Fit the NMF model again with the KL-divergence loss (multiplicative-update
# solver, which is the only solver supporting beta_loss != 'frobenius').
print('\n' * 2, "Fitting the NMF model (generalized Kullback-Leibler "
      "divergence) with tf-idf features, n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
t0 = time()
nmf = NMF(n_components=n_components, random_state=1,
          beta_loss='kullback-leibler', solver='mu', max_iter=1000, alpha=.1,
          l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
plot_top_words(nmf, tfidf_feature_names, n_top_words,
               'Topics in NMF model (generalized Kullback-Leibler divergence)')
# LDA is fitted on raw counts (tf), not tf-idf.
print('\n' * 2, "Fitting LDA models with tf features, "
      "n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
lda = LatentDirichletAllocation(n_components=n_components, max_iter=5,
                                learning_method='online',
                                learning_offset=50.,
                                random_state=0)
t0 = time()
lda.fit(tf)
print("done in %0.3fs." % (time() - t0))
tf_feature_names = tf_vectorizer.get_feature_names()
plot_top_words(lda, tf_feature_names, n_top_words, 'Topics in LDA model')<file_sep>import os
import re
import time
# MISSTATED FILINGS
# 1. https://www.sec.gov/Archives/edgar/data/0001025771/000114420406012804/v039306_10-k.txt
# 20060331_10-K_edgar_data_1025771_0001144204-06-012804_1.txt
PATH = "/Data/10-K Sample"
REGEX_10K = r"Item[\s]+?7\.([\s\S]*?)((Item[\s]+?7A\.)|(Item[\s]+?8\.))"
# 1. Item[\s]+?(%\$%)?7\.([\s\S]*?)Item[\\n\s]+?(%\$%)?7A\.
# 2. Item[\s]+?7\.([\s\S]*?)((Item[\s]+?7A\.)|(Item[\s]+?8\.))
# Original: r"Item[\\n\s]?(%\$%)?7\.([\s\S]*?)Item[\\n\s]?(%\$%)?7A\."
REGEX_10Q = r"Item[\\n\s]?(%\$%)?2\.([\s\S]*?)Item[\\n\s]?(%\$%)?3\."
if __name__ == "__main__":
    # Manual spot-check harness: for each 10-K in the sample directory,
    # extract the MD&A (Item 7) with REGEX_10K, open both the source filing
    # and the extracted text for visual comparison.
    _, _, file_names = next(
        os.walk(
            f'/Data/10-K Sample'))
    for file_name in file_names:
        file_path = f'{PATH}\\{file_name}'
        # FIX: use context managers -- the original leaked the read handle
        # and launched the viewer on output.txt before closing it, so the
        # viewer could see a partially flushed file.
        with open(file_path) as source_file:
            file_text = source_file.read()
        match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
        try:
            # The last regex hit is used: earlier hits are usually
            # table-of-contents entries; the real section comes later.
            mda = match[-1][0]
            os.startfile(file_path)
            with open('Text Extraction\\output.txt', 'w') as output_file:
                output_file.write(mda)
            os.startfile('Text Extraction\\output.txt')
        except Exception as e:
            # IndexError when the regex found nothing; report and move on.
            print(e, file_name)
input('Continue?: 1 or 0')<file_sep>import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import statsmodels.api as sm
import math
def plot_cumulative_returns(df, n_portfolios, log=False):
    """Plot the cumulative growth of $1 for every sorted portfolio, the
    market proxy (Mkt-RF + RF, labelled S&P 500) and the top-minus-bottom
    long-short spread. ``log=True`` plots log cumulative returns instead.
    """
    def draw(returns, label, dashed=False):
        # Compound simple returns into a cumulative-return path.
        path = np.cumprod(1 + returns.values) - 1
        series = np.log(path) if log else path
        if dashed:
            plt.plot(df['date'], series, '--', label=label)
        else:
            plt.plot(df['date'], series, label=label)

    for portfolio in range(n_portfolios):
        draw(df[portfolio], f'Portfolio {portfolio}')
    # Market proxy and long-short spread are drawn dashed for contrast.
    draw(df['Mkt-RF'] + df['RF'], f'S&P 500', dashed=True)
    draw(df[n_portfolios - 1] - df[0], f'Long {n_portfolios - 1}, Short 0', dashed=True)
    plt.legend()
    plt.xlabel('Date')
    plt.ylabel(f'Cumulative Return{": Log Returns" if log else ""}')
    plt.title(f'Cumulative Return of $1 for {n_portfolios} Portfolios')
    plt.show()
def ols_helper(est, model_type, portfolio):
    """Flatten a fitted OLS result into one row per regressor.

    Each row carries the model label, the portfolio id, the fit statistics
    and that factor's coefficient / t-value / p-value. All cells are
    stringified because the rows pass through ``np.array`` before building
    the DataFrame.
    """
    parameters = dict(est.params)
    rows = [
        [model_type, portfolio, est.rsquared, est.rsquared_adj,
         factor, coefficient, est.tvalues[position], est.pvalues[position]]
        for position, (factor, coefficient) in enumerate(parameters.items())
    ]
    return pd.DataFrame(
        np.array(rows),
        columns=['Model', 'Portfolio', 'R^2', 'Adj. R^2', 'Factor', 'Coef.', 't-Value', 'p-Value'])
def summary_statistics(df, n_portfolios):
    """Fit CAPM and FF3F regressions for every portfolio plus the
    top-minus-bottom spread, then print the coefficient and performance
    tables. The market itself (Mkt-RF + RF) is appended to the performance
    table without factor regressions.
    """
    ff3f_design = sm.add_constant(df[['Mkt-RF', 'SMB', 'HML']])
    capm_design = sm.add_constant(df[['Mkt-RF']])
    coef_table = pd.DataFrame()
    perf_table = pd.DataFrame()

    def analyse(returns, label):
        # Fit both factor models on the same return series and collect the
        # coefficient rows and performance rows for each.
        nonlocal coef_table, perf_table
        ff3f_fit = sm.OLS(returns, ff3f_design).fit()
        capm_fit = sm.OLS(returns, capm_design).fit()
        coef_table = coef_table.append(ols_helper(est=ff3f_fit, model_type='FF3F', portfolio=label))
        coef_table = coef_table.append(ols_helper(est=capm_fit, model_type='CAPM', portfolio=label))
        perf_table = perf_table.append(portfolio_performance(returns, ff3f_fit, 'FF3F', label))
        perf_table = perf_table.append(portfolio_performance(returns, capm_fit, 'CAPM', label))

    for portfolio in range(n_portfolios):
        analyse(df[portfolio], portfolio)
    # Top-minus-bottom long-short spread.
    analyse(df[n_portfolios - 1] - df[0], f'L{n_portfolios - 1}S0')

    # The market series gets only a descriptive-statistics row.
    market = df['Mkt-RF'] + df['RF']
    market_fit = sm.OLS(market, capm_design).fit()
    perf_table = perf_table.append(portfolio_performance(market, market_fit, 'CAPM', f'Market', is_market=True))
    print(coef_table)
    print(perf_table)
def portfolio_performance(returns, est, model_type, portfolio, is_market=False):
    """Build a one-row, annualised performance summary for a portfolio.

    Alpha and its t-stat come from the fitted model's intercept; return,
    risk and Sharpe are annualised from the monthly series. For the market
    row (``is_market=True``) the model/alpha cells are left blank.
    """
    annual_return = returns.mean() * 12
    annual_risk = returns.std() * math.sqrt(12)
    # Shared tail of the row: Return, Risk, Sharpe.
    common = [
        f'{round(annual_return * 100, 2)}%',
        f'{round(annual_risk * 100, 2)}%',
        round(annual_return / annual_risk, 4),
    ]
    if is_market:
        row = ['', 'Market', '', ''] + common
    else:
        # Intercept of the factor regression is the (monthly) alpha.
        intercept = list(dict(est.params).values())[0]
        row = [
            model_type,
            portfolio,
            f'{round(intercept * 12 * 100, 2)}%',
            round(est.tvalues[0], 4),
        ] + common
    return pd.DataFrame(np.array([row]),
                        columns=['Model', 'Portfolio', 'Alpha', 't-Stat', 'Return', 'Risk', 'Sharpe'])
<file_sep>import pandas as pd
import os
if __name__ == "__main__":
    # Collect every per-chunk similarity CSV into one frame and persist it.
    # NOTE: despite the name, 'directories' holds the FILE names from the
    # walked folder (third element of os.walk's tuple).
    _, _, directories = next(os.walk('../EntireFile/Cosine/TfIdf/Quarterly'))
    df_all = pd.DataFrame()
    for d in directories:
        # NOTE(review): names are listed from '../EntireFile/Cosine/TfIdf/Quarterly'
        # but files are read from 'EntireFile/Quarterly/' -- confirm both
        # refer to the same folder.
        df = pd.read_csv(f'EntireFile/Quarterly/{d}', header=None, names=['date', 'cik', 'similarity'])
        df_all = df_all.append(df)
    print(df_all)
    df_all.to_csv('EntireFileSimilarity.csv')
<file_sep>import pickle
import os
import re
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import plot_roc_curve, plot_confusion_matrix, classification_report
from sklearn.model_selection import train_test_split
import numpy as np
import matplotlib.pyplot as plt
import shap
from sklearn.model_selection import cross_validate
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers
import tensorflow.keras as keras
from sklearn.utils import class_weight
from sklearn.metrics import confusion_matrix
import seaborn as sns
PATH = "/Data/10-Q Sample"
# PATH = "D:\\Python\\Projects\\FinTech\\SEC-Analytics\\Data\\10-Q Sample"
# PATH = "C:\\Users\\jpetr\\PycharmProjects\\SEC-Analytics\\Data\\10-K Sample"
REGEX_10K = r"(Item[\s]+?7\.[\s\S]*?)(Item[\s]+?8\.)"
REGEX_10Q = r"(Item[\s]+?2\.[\s\S]*?)(Item[\s]+?3\.)"
pd.options.display.width = 0
# pd.set_printoptions(max_rows=200, max_columns=10)
pd.set_option('display.max_columns', 10)
pd.set_option('display.max_rows', 100)
def get_supervised(new_file_name):
    """Interactively build a labelled dataset of MD&A section boundaries.

    For each 10-Q under ``PATH``, locate the MD&A via REGEX_10Q, number
    every occurrence of the word 'item', open annotated copies of the
    filing for visual inspection, and ask the user to confirm or correct
    the opening/closing 'item' indices. Labels are appended to
    ``new_file_name`` as ``file,open,close`` rows. Files already listed in
    QuarterlySupervised.csv are skipped.
    """
    # FIX: removed the dead `already_done = []` (immediately overwritten)
    # and wrapped every file handle in a context manager so nothing leaks.
    with open('QuarterlySupervised.csv') as done_file:
        already_done = done_file.read()
    _, _, file_names = next(os.walk(PATH))
    for file_name in file_names:
        if file_name in already_done:
            print('file already done!')
            continue
        file_path = f'{PATH}\\{file_name}'
        with open(file_path) as source_file:
            file_text = source_file.read()
        match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
        # Start offsets of every (case-insensitive) 'item' in the filing.
        items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        opening_index = None
        closing_index = None
        try:
            # The last regex hit is the best MD&A candidate (earlier ones
            # are usually table-of-contents entries).
            mda = match[-1][0]
            # Rebuild the filing with ' item $%$ <n> $%$' markers so the
            # reviewer can see each occurrence's index in context.
            modified_file_text = ''
            for index, s in enumerate(re.split(r'item', file_text, flags=re.IGNORECASE)):
                modified_file_text += f'{s} item $%$ {index} $%$'
            for index, item in enumerate(items):
                # Candidate opening: the 'item' whose following text equals
                # the regex-extracted MD&A; candidate closing: the 'item'
                # nearest the MD&A's end offset.
                if file_text[item: item + len(mda)] == mda:
                    opening_index = index
                    closing_index = min(range(len(items)), key=lambda i: abs(items[i] - (item + len(mda))))
            with open('modified.txt', 'w') as modified_file:
                modified_file.write(modified_file_text)
            os.startfile('modified.txt')
            # os.startfile(file_path)
            with open('output.txt', 'w') as output_file:
                output_file.write(mda)
            os.startfile('output.txt')
        except Exception as e:
            print(e, file_name)
        print(f'\n{file_name}\nOpening: {opening_index}\nClosing: {closing_index}')
        actual_open = input('What is the open index? :')
        actual_close = input('What is the close index? :')
        if actual_open == '':
            actual_open = opening_index
        if actual_close == '':
            actual_close = closing_index
        # Append mode so earlier labelling sessions are preserved.
        with open(new_file_name, 'a') as supervised_file:
            supervised_file.write(f'{file_name},{actual_open},{actual_close}\n')
def get_classification_quarterly():
    """Build the per-'item' training table for the 10-Q boundary classifiers.

    For every hand-labelled filing in QuarterlySupervised.csv, each
    occurrence of the word 'item' yields one row of window-based text
    features around that occurrence, plus one-hot targets marking the true
    MD&A opening/closing occurrence. Output: quarterlyClassification.csv.
    """
    labelled = pd.read_csv('QuarterlySupervised.csv')
    labelled = labelled[labelled['open'] != 'None']
    labelled = labelled[labelled['close'] != 'None']
    data = pd.DataFrame(columns=['file', 'true_location_open', 'true_location_close', 'input_location', 'position', 'trailing_management',
                                 'trailing_period', 'trailing_2', 'y_open', 'y_close', 'trailing_newline',
                                 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'regex_open',
                                 'regex_close', 'trailing_financial', 'leading_words', 'leading_see',
                                 'leading_text', 'leading_double_newline', 'is_uppercase', 'trailing_omission', 'leading_with',
                                 'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis', 'trailing_3', 'trailing_quantitative'])
    for _, labelled_row in labelled.iterrows():
        file_text = open(f'{PATH}\\{labelled_row.file}').read()
        # Offsets of every (case-insensitive) 'item' in the filing.
        items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        # One-hot target vectors over those occurrences.
        y_open = [0] * len(items)
        y_open[int(labelled_row.open)] = 1
        y_close = [0] * len(items)
        y_close[int(labelled_row.close)] = 1
        total_text_length = len(file_text)
        match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
        for index, item in enumerate(items):
            # Window helpers: text immediately after / before this 'item'.
            after = lambda n: file_text[item: item + n]
            before = lambda n: file_text[item - n: item]
            # Hand-tuned indicator features on small text windows.
            trailing_management = int('management' in after(80).lower())
            trailing_quantitative = int('quantitative' in after(80).lower())
            trailing_analysis = int('analysis' in after(80).lower())
            trailing_period = int('.' in after(20))
            trailing_2 = int('2' in after(20))
            trailing_3 = int('3' in after(20))
            trailing_newline = int('\n' in after(100))
            is_uppercase = int(after(15).isupper())
            leading_newline = int('\n' in before(5))
            leading_double_newline = int('\n\n' in before(5))
            leading_tab = int('\t' in before(5))
            leading_spaces = int(' ' in before(5))
            lead_tokens = before(40).split(' ')
            # "Looks like running prose before the heading": several tokens
            # with average length above 2.
            leading_words = int(len(lead_tokens) > 2 and (sum([len(w) for w in lead_tokens]) / len(lead_tokens)) > 2)
            leading_see = int('see' in before(10).lower())
            leading_with = int('with' in before(10).lower())
            leading_text = int(len(re.findall(r'\w+', before(5))) > 0)
            trailing_financial = int('financial' in after(30).lower())
            trailing_omission = int(('applicable' in after(300).lower()) or ('omitted' in after(300).lower()))
            leading_table_of_contents = int('table of contents' in before(50).lower())
            leading_newline_count = len(before(20).split('\n'))
            # Regex-based boundary guesses: does the text after/before this
            # occurrence equal the regex-extracted MD&A section?
            regex_open = 0
            regex_close = 0
            try:
                mda = match[-1][0]
                if file_text[item: item + len(mda)] == mda:
                    regex_open = 1
            except IndexError:
                pass
            try:
                mda = match[-1][0]
                if file_text[item - len(mda): item] == mda:
                    regex_close = 1
            except IndexError:
                pass
            data = data.append(
                {
                    'file': labelled_row.file,
                    'position': item / total_text_length,
                    'y_open': y_open[index],
                    'trailing_management': trailing_management,
                    'y_close': y_close[index],
                    'trailing_period': trailing_period,
                    'trailing_2': trailing_2,
                    'trailing_newline': trailing_newline,
                    'leading_newline': leading_newline,
                    'total_size': total_text_length,
                    'leading_tab': leading_tab,
                    'leading_spaces': leading_spaces,
                    'regex_open': regex_open,
                    'regex_close': regex_close,
                    'trailing_financial': trailing_financial,
                    'true_location_open': labelled_row.open,
                    'true_location_close': labelled_row.close,
                    'input_location': index,
                    'leading_words': leading_words,
                    'leading_see': leading_see,
                    'leading_text': leading_text,
                    'leading_double_newline': leading_double_newline,
                    'is_uppercase': is_uppercase,
                    'trailing_omission': trailing_omission,
                    'leading_with': leading_with,
                    'leading_table_of_contents': leading_table_of_contents,
                    'leading_newline_count': leading_newline_count,
                    'trailing_analysis': trailing_analysis,
                    'trailing_3': trailing_3,
                    'trailing_quantitative': trailing_quantitative
                }, ignore_index=True)
    data.to_csv('quarterlyClassification.csv')
def open_train_quarterly():
    """Train the MD&A *opening*-boundary classifier.

    Shows the confusion matrices of the regex-only baseline
    (``regex_open``) first, then fits a class-balanced random forest on
    the window features, pickles it, and plots its confusion matrices and
    a SHAP feature-attribution summary.
    """
    features = pd.read_csv('quarterlyClassification.csv', index_col=0)
    features = features[features['y_open'] != 'None']
    features['true_class'] = np.where(features['input_location'] == features['true_location_open'], 1, 0)

    # --- Regex baseline: how well does regex_open alone predict y_open? ---
    baseline_X = features[list(features.columns)]
    baseline_y = features['y_open']
    X_train, X_test, y_train, y_test = train_test_split(baseline_X, baseline_y, test_size=0.25, random_state=699, shuffle=True)
    train_cm = confusion_matrix(y_true=y_train, y_pred=X_train['regex_open'], normalize='true')
    sns.heatmap(train_cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    test_cm = confusion_matrix(y_true=y_test, y_pred=X_test['regex_open'], normalize='true')
    sns.heatmap(test_cm, annot=True, cmap='Blues')
    plt.show()

    # --- Random forest on the hand-crafted opening-boundary features. ---
    model_X = features[['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline', 'leading_newline',
                        'total_size', 'regex_open', 'trailing_analysis']]
    model_y = features['y_open'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(model_X, model_y, test_size=0.25, random_state=699, shuffle=True)
    forest = RandomForestClassifier(class_weight='balanced', max_depth=5, random_state=699, n_estimators=1000)
    forest.fit(X_train, y_train)
    with open('../open_quarterly_random_forest.pkl', 'wb') as model_file:
        pickle.dump(forest, model_file)
    plot_confusion_matrix(forest, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(forest, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()

    # Feature attribution for the fitted forest.
    explainer = shap.TreeExplainer(forest)
    shap_values = explainer.shap_values(model_X)
    shap.summary_plot(shap_values, model_X)
def close_train_quarterly():
    """Train the MD&A *closing*-boundary classifier.

    Mirrors open_train_quarterly: regex-only baseline (``regex_close``)
    confusion matrices first, then a class-balanced random forest on the
    closing-boundary features, pickled to
    ../close_quarterly_random_forest.pkl, followed by its confusion
    matrices and a SHAP summary plot.
    """
    features = pd.read_csv('quarterlyClassification.csv', index_col=0)
    features = features[features['y_close'] != 'None']
    # NOTE(review): kept from the original -- 'true_class' is derived from
    # true_location_open even in this *close* trainer, and is never used.
    features['true_class'] = np.where(features['input_location'] == features['true_location_open'], 1, 0)

    # --- Regex baseline ---
    baseline_X = features[list(features.columns)]
    baseline_y = features['y_close']
    X_train, X_test, y_train, y_test = train_test_split(baseline_X, baseline_y, test_size=0.25, random_state=699, shuffle=True)
    train_cm = confusion_matrix(y_true=y_train, y_pred=X_train['regex_close'], normalize='true')
    sns.heatmap(train_cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    test_cm = confusion_matrix(y_true=y_test, y_pred=X_test['regex_close'], normalize='true')
    sns.heatmap(test_cm, annot=True, cmap='Blues')
    plt.show()

    # --- Random forest on the closing-boundary features ---
    model_X = features[['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'regex_close',
                        'trailing_quantitative']]
    model_y = features['y_close'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(model_X, model_y, test_size=0.25, random_state=699, shuffle=True)
    forest = RandomForestClassifier(class_weight='balanced', max_depth=5, random_state=699, n_estimators=1000)
    forest.fit(X_train, y_train)
    with open('../close_quarterly_random_forest.pkl', 'wb') as model_file:
        pickle.dump(forest, model_file)
    plot_confusion_matrix(forest, X_train, y_train, normalize='true', cmap='Blues')
    plt.show()
    plot_confusion_matrix(forest, X_test, y_test, normalize='true', cmap='Blues')
    plt.show()

    explainer = shap.TreeExplainer(forest)
    shap_values = explainer.shap_values(model_X)
    shap.summary_plot(shap_values, model_X)
#
# plot_roc_curve(logistic, X_test, y_test)
# plt.show()
# plot_roc_curve(logistic, X_train, y_train)
# plt.show()
#
# print(classification_report(y_test, logistic.predict(X_test)))
# print(logistic.predict_proba(X_test)[:, 1])
# positive = df[df['y_close'] == 0]
#
# df = pd.DataFrame(list(logistic.predict_proba(positive[['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial', 'regex_close',
# 'trailing_quantitative']])[:, 1]), columns=['probability'])
# df['probability'].hist(bins=100)
# plt.show()
# def test_file(file_directory, open_model, close_model):
if __name__ == "__main__":
    # Entry point: only the close-boundary trainer is active; other pipeline
    # stages are kept commented out for manual runs.
    # close_training()
    # test()
    # import pickle
    # with open('opening_random_forest.pkl', 'rb') as f:
    #     clf = pickle.load(f)
    #
    # clf.predict(X_train)
    # get_supervised('QuarterlySupervised.csv')
    # test_quarterly()
    close_train_quarterly()
# open_train_quarterly()<file_sep>from sklearn.feature_extraction.text import TfidfVectorizer
import pandas as pd
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
import pathlib
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
import os
import re
import string
# TODO: Consider looking at random samples of bankruptcy subset and non-bankrupt subset and make comparisions of frequency of bigrams.
# This could lead to feature creation!
PATH = '\\'.join(str(pathlib.Path().absolute()).split('\\')[:-1])
def text_stuff(df):
    """Clean MD&A text and fit a 20-topic LDA on bigram counts.

    Cleaning: restore the CSV escape markers ('\\n' for newlines, '$%$'
    for commas), drop digits and punctuation, remove English stopwords and
    lemmatise each token.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain an 'mda' text column.
    """
    lemmatizer = WordNetLemmatizer()
    stop_words = set(stopwords.words('english'))
    df = df[~df['mda'].isna()]
    df['mda'] = df['mda'].apply(lambda x: str(x).replace('\\n', '\n').replace('$%$', ','))

    def apply_stuff(x):
        # Normalise one document: strip digits/punctuation, drop stopwords,
        # lemmatise; returns '' if lemmatisation fails.
        x = re.sub(r'[0-9]+', '', x)
        x = x.translate(str.maketrans('', '', string.punctuation))
        x = str(x).split(' ')
        try:
            return ' '.join([lemmatizer.lemmatize(w) for w in x if not w in stop_words])
        except ValueError:
            return ''

    df['mda'] = df['mda'].apply(lambda x: apply_stuff(x))
    corpus = list(df['mda'])
    count = CountVectorizer(ngram_range=(2, 2))
    count.fit(corpus)
    # BUG FIX: LatentDirichletAllocation.fit requires a document-term
    # matrix, not raw strings -- the original `l.fit(corpus)` raised.
    # Transform the corpus with the fitted bigram vectorizer first.
    X = count.transform(corpus)
    l = LatentDirichletAllocation(n_components=20)
    l.fit(X)
def lda(df):
    """Fit a 20-topic LDA on the 'mda' text column of *df*.

    BUG FIX: the original called ``fit`` directly on the raw text Series,
    which scikit-learn's LDA rejects (it expects a document-term count
    matrix); vectorize the documents first.
    """
    counts = CountVectorizer().fit_transform(df['mda'].astype(str))
    l = LatentDirichletAllocation(n_components=20)
    l.fit(counts)
    print('')
if __name__ == "__main__":
    # Load bankruptcy flags and all extracted MD&A sections, join them on
    # CIK + filing month, and run LDA on the soon-to-be-bankrupt subset.
    bankruptcy = pd.read_csv('bankruptcy_cik.csv', index_col=0)
    # NOTE: despite the name, 'files' holds file names (os.walk's third
    # tuple element).
    _, _, files = next(os.walk(f'{PATH}\\Analysis\\Extracted'))
    mda = pd.DataFrame(columns=['date', 'cik', 'mda'])
    for file in files:
        extracted = pd.read_csv(f'{PATH}\\Analysis\\Extracted\\{file}', header=None, names=['path', 'date', 'cik', 'mda'])
        mda = mda.append(extracted[['date', 'cik', 'mda']], ignore_index=True)
    # 'YYYY-MM' key derived from the YYYYMMDD filing date.
    mda['merge_date'] = mda['date'].apply(lambda x: f'{str(x)[:4]}-{str(x)[4:6]}')
    # NOTE: 'all' shadows the builtin of the same name.
    all = pd.merge(mda, bankruptcy, left_on=['merge_date', 'cik'], right_on=['date_month', 'cik'])
    data = all[all['bankruptcyWithin12Months'] == 1]
    lda(data)
# text_stuff(all[all['bankruptcyWithin12Months'] == 1])<file_sep>import pandas as pd
import numpy as np
import os
from PetriProgramming.PortfolioHelper import plot_cumulative_returns, summary_statistics
import matplotlib.pyplot as plt
pd.set_option('display.max_columns', 80)
pd.set_option('display.max_rows', 25)
pd.set_option('display.min_rows', 25)
pd.options.display.width = 150
# SIMILARITY_PATH_QUARTER = '../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Quarterly'
# SIMILARITY_PATH_ANNUAL = '../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Annual'
# SIMILARITY_PATH_QUARTER = '../MDA/Cosine/CountVectorizerBinary/Quarterly'
# SIMILARITY_PATH_ANNUAL = None
def get_data(path_quarter, path_annual):
    """Join filing-similarity scores onto monthly CRSP stock returns.

    Reads per-chunk similarity CSVs (quarterly, optionally annual), links
    CIK to CRSP PERMNOs via the CUSIP cross-walk, forward-fills each
    firm's latest similarity score, and returns one row per PERMNO-month
    with price, return, shares outstanding and similarity.
    """
    # Quarterly similarity files ('directories' actually holds file names).
    _, _, directories = next(os.walk(path_quarter))
    # TODO: a firm can file multiple 10-Qs in one month; the joins below do
    # not de-duplicate those and can produce repeated rows.
    similarity = pd.DataFrame()
    for d in directories:
        df = pd.read_csv(f'{path_quarter}\\{d}', header=None,
                         names=['date', 'cik', 'similarity'])
        similarity = similarity.append(df)
    # Annual similarity files (optional).
    if path_annual:
        _, _, directories = next(os.walk(path_annual))
        for d in directories:
            df = pd.read_csv(f'{path_annual}\\{d}', header=None,
                             names=['date', 'cik', 'similarity'])
            similarity = similarity.append(df)
    # Filing dates arrive as YYYYMMDD integers.
    similarity['date'] = pd.to_datetime(similarity['date'], format='%Y%m%d')
    similarity['month'] = similarity['date'].dt.month
    similarity['year'] = similarity['date'].dt.year
    similarity = similarity.rename(columns={'date': 'filing_date'})
    returns = pd.read_csv('../../Data/Returns-CRSP.csv')
    returns['date'] = pd.to_datetime(returns['date'], format='%Y%m%d')
    # PRC can be negative in the CRSP extract (presumably bid/ask-average
    # prices); use magnitudes.
    returns['PRC'] = returns['PRC'].apply(lambda x: abs(x))
    # Non-numeric return codes become NaN.
    returns['RET'] = pd.to_numeric(returns['RET'], errors='coerce')
    returns['year'] = returns['date'].dt.year
    returns['month'] = returns['date'].dt.month
    CIK_link = pd.read_csv('../../Data/CIK_Link.csv')
    # Truncate the linking file's CUSIP to 8 characters to match CRSP's.
    CIK_link['merge_CUSIP'] = CIK_link['cusip'].apply(lambda x: str(x)[:8])
    CIK_link['datadate'] = pd.to_datetime(CIK_link['datadate'], format='%Y%m%d')
    CIK_link['month'] = CIK_link['datadate'].dt.month
    CIK_link['year'] = CIK_link['datadate'].dt.year
    CIK_merged = pd.merge(returns, CIK_link, how='left', left_on=['year', 'month', 'CUSIP'],
                          right_on=['year', 'month', 'merge_CUSIP'])
    df_all = pd.merge(CIK_merged, similarity, how='left', on=['cik', 'year', 'month'])
    df_all = df_all[df_all['year'] < 2019]
    # Carry each firm's most recent similarity forward until its next filing.
    df_all['similarity'] = df_all.groupby('PERMNO')['similarity'].ffill()
    df_all = df_all[~df_all['similarity'].isna()]
    df_all = df_all[~df_all['RET'].isna()]
    df_all = df_all[(~df_all['PRC'].isna()) & (~df_all['SHROUT'].isna())]
    df_all = df_all[['PERMNO', 'date', 'SHRCD', 'EXCHCD', 'SICCD', 'TICKER', 'COMNAM', 'SHRCLS',
                     'CUSIP', 'PRC', 'RET', 'SHROUT', 'cik', 'similarity', 'filing_date']]
    return df_all
def get_portfolio_monthly(n_portfolios, factor_column, df):
    """Monthly-rebalanced, value-weighted portfolio sort.

    Sorts stocks into ``n_portfolios`` quantile buckets on
    ``factor_column`` at each month-end, applies the buckets to the
    following month's returns with market-cap weights, and merges on the
    Fama-French factors. Returns one row per month with one return column
    per portfolio plus the FF3F columns.
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Collapse duplicate PERMNO-date rows (TODO: multiple filings / share
    # classes should be handled upstream instead of dropped here).
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where coverage is better.
    df = df[df['year'] > 1996]
    # NOTE(review): this line OVERWRITES the similarity factor with the
    # NEXT month's return, so the sort below ranks on future returns
    # (look-ahead bias). It looks like a leftover sanity check -- confirm
    # before trusting any results from this function.
    df['similarity'] = df.groupby('PERMNO')['RET'].shift(-1)
    df = df[df['similarity'].notna()]
    # Month-end snapshot per PERMNO used for ranking and weighting.
    df_eom = df.drop_duplicates(subset=['PERMNO', 'year', 'month'], keep='last')
    df_eom = df_eom[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    # Equal-count quantile buckets within each month.
    df_eom['group'] = df_eom.groupby(['year', 'month'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights: each stock's share of its bucket's total market cap.
    eom_group_market_cap = df_eom.groupby(['group', 'year', 'month'])['market_cap'].sum().reset_index()
    eom_group_market_cap = eom_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eom = pd.merge(df_eom, eom_group_market_cap, how='left', on=['group', 'year', 'month'])
    df_eom['weight'] = df_eom['market_cap'] / df_eom['group_market_cap']
    # Apply each month-end's bucket/weight to the FOLLOWING month's rows.
    df_eom['merge_year'] = df_eom.apply(lambda x: x['year'] if x['month'] != 12 else x['year'] + 1, axis=1)
    df_eom['merge_month'] = df_eom['month'].apply(lambda x: x + 1 if x != 12 else 1)
    df_eom = df_eom[['merge_year', 'merge_month', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eom, how='left', left_on=['PERMNO', 'year', 'month'],
                  right_on=['PERMNO', 'merge_year', 'merge_month'])
    df['weighted_return'] = df['weight'] * df['RET']
    # One column per portfolio, one row per date.
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # Fama-French factors arrive in percent; convert to decimals.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F's YYYYMM stamps to month-end, then join on year/month.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def get_portfolio_monthly_equal(n_portfolios, factor_column, df):
    """Equal-weighted variant of get_portfolio_monthly.

    Same month-end quantile sort on ``factor_column``, but next-month
    portfolio returns are simple means across members instead of
    market-cap weighted, and diagnostic coverage plots are shown. Unlike
    the value-weighted version, the factor column is NOT overwritten with
    future returns here.
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Collapse duplicate PERMNO-date rows (TODO: handle upstream).
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where coverage is better.
    df = df[df['year'] > 1996]
    # Month-end snapshot per PERMNO used for ranking.
    df_eom = df.drop_duplicates(subset=['PERMNO', 'year', 'month'], keep='last')
    df_eom = df_eom[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    # Equal-count quantile buckets within each month.
    df_eom['group'] = df_eom.groupby(['year', 'month'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Apply each month-end's bucket to the FOLLOWING month's rows.
    df_eom['merge_year'] = df_eom.apply(lambda x: x['year'] if x['month'] != 12 else x['year'] + 1, axis=1)
    df_eom['merge_month'] = df_eom['month'].apply(lambda x: x + 1 if x != 12 else 1)
    df_eom = df_eom[['merge_year', 'merge_month', 'PERMNO', 'group']]
    df = pd.merge(df, df_eom, how='left', left_on=['PERMNO', 'year', 'month'],
                  right_on=['PERMNO', 'merge_year', 'merge_month'])
    # Diagnostics: overall coverage over time.
    df.groupby('date')['PERMNO'].count().plot()
    plt.show()
    # Validate the sorting methodology: member counts per portfolio.
    a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    for i in range(n_portfolios):
        plt.plot(a[i], label=f'Portfolio {i}')
    plt.show()
    # Equal weight: plain mean return of the members each month.
    portfolios = df.groupby(['group', 'date'])['RET'].mean().unstack().T.reset_index()
    # Fama-French factors arrive in percent; convert to decimals.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F's YYYYMM stamps to month-end, then join on year/month.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def get_portfolio_quarterly(n_portfolios, factor_column, df):
    """Sort stocks on ``factor_column`` into value-weighted portfolios reconstituted quarterly.

    Portfolio membership and market-cap weights are formed from end-of-period
    observations in quarter Q and applied to returns in quarter Q+1, then
    Fama-French 3-factor data is merged in by year/month.

    :param n_portfolios: number of quantile portfolios for the sort
    :param factor_column: column of ``df`` to sort on (e.g. 'similarity')
    :param df: CRSP-style panel with PERMNO/date/PRC/SHROUT/RET columns
              (mutated in place: date parts and market_cap columns are added)
    :return: wide DataFrame, one column of monthly returns per portfolio,
             plus Mkt-RF/SMB/HML/RF factor columns
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Gets rid of some multiple stuff...
    # This needs to be fixed
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # df['random'] = np.random.rand(df.shape[0])
    # df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.9 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where we have more data...
    df = df[df['year'] > 1996]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # NOTE(review): rows here are deduplicated per PERMNO/MONTH but merged back
    # below on PERMNO/QUARTER, so up to three end-of-month rows per firm-quarter
    # can duplicate return rows after the merge. The sibling quarterly function
    # dedupes on ['PERMNO', 'year', 'quarter'] instead — confirm which is intended.
    df_eom = df.drop_duplicates(subset=['PERMNO', 'year', 'month'], keep='last')
    df_eom = df_eom[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    # Quantile sort within each year/quarter; labels are portfolio ids 0..n-1.
    df_eom['group'] = df_eom.groupby(['year', 'quarter'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights: each stock's share of its portfolio's total market cap.
    eom_group_market_cap = df_eom.groupby(['group', 'year', 'quarter'])['market_cap'].sum().reset_index()
    eom_group_market_cap = eom_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eom = pd.merge(df_eom, eom_group_market_cap, how='left', on=['group', 'year', 'quarter'])
    df_eom['weight'] = df_eom['market_cap'] / df_eom['group_market_cap']
    # Merge the group formed in quarter Q onto returns of quarter Q+1
    # (Q4 rolls into Q1 of the next year).
    df_eom['merge_year'] = df_eom.apply(lambda x: x['year'] if x['quarter'] != 4 else x['year'] + 1, axis=1)
    df_eom['merge_quarter'] = df_eom['quarter'].apply(lambda x: x + 1 if x != 4 else 1)
    df_eom = df_eom[['merge_year', 'merge_quarter', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eom, how='left', left_on=['PERMNO', 'year', 'quarter'],
                  right_on=['PERMNO', 'merge_year', 'merge_quarter'])
    # df = df[~df['weight'].isna()]
    # df = df[~df['group'].isna()]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # Validate sorting methodology
    # a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    #
    # a = df.groupby(['group', 'date'])['weight'].sum().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    # Value-weighted portfolio return: sum of weight * stock return per group/date.
    df['weighted_return'] = df['weight'] * df['RET']
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # FF3F factors come as percentages; convert to decimal returns.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F monthly dates to month-end timestamps before merging.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def explore(df):
    """Exploratory plot: compare distributions of a placebo 'random' signal for
    observations with positive vs. negative next-period returns.

    Adds a ``RET_t_1`` column (next-period return per PERMNO) to ``df`` in
    place, then overlays two histograms of uniform random draws split by the
    sign of ``RET_t_1``.

    :param df: panel with PERMNO/RET/similarity columns (mutated in place)
    :return: None (displays a matplotlib figure)
    """
    print(df.columns)
    # Next-period return: lead each firm's return series by one observation.
    df['RET_t_1'] = df.groupby('PERMNO')['RET'].shift(-1)
    a = df[['RET_t_1', 'similarity', 'RET']].dropna()
    # Placebo signal drawn uniformly on [0, 1) for each observation.
    a['random'] = np.random.rand(a.shape[0])
    # Split the placebo draws by the sign of the next-period return.
    # (The original also built unused copies of the return series and an
    # unused 'similarities' list; that dead code has been removed.)
    pos_signal = []
    neg_signal = []
    for ret, rnd in zip(a['RET_t_1'], a['random']):
        if ret < 0:
            neg_signal.append(rnd)
        else:
            pos_signal.append(rnd)
    # Overlaid histograms: if the signal carried no information, the two
    # distributions should look identical (which a uniform placebo should).
    bins = np.linspace(0, 1, 1000)
    plt.hist(neg_signal, bins, alpha=0.5, label='neg')
    plt.hist(pos_signal, bins, alpha=0.5, label='pos')
    plt.legend(loc='upper right')
    plt.show()
if __name__ == "__main__":
    # Build the similarity/returns panel from the binary count-vectorizer runs.
    d = get_data(path_quarter='../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Quarterly',
                 path_annual='../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Annual')
    # d = get_data(path_quarter='../EntireFile/Cosine/CountVectorizer/Quarterly',
    #              path_annual=None)
    #
    # explore(d)
    # Decile sort on filing similarity, then plot and summarize.
    n_portfolios = 10
    # p = get_portfolio_quarterly(5, 'similarity', d)
    p = get_portfolio_monthly(n_portfolios, 'similarity', d)
    # p.to_csv('test.csv')
    # p = get_portfolio_monthly(5, 'similarity', d)
    plot_cumulative_returns(p, n_portfolios)
    summary_statistics(p, n_portfolios)
    # Saved results from a previous run, kept for reference:
    # Without annual statement similarity
    #   Model Portfolio  Alpha   t-Stat Return    Risk  Sharpe
    # 0  FF3F         0  1.11%   1.3338  7.66%  15.44%  0.4962
    # 0  CAPM         0  1.27%   1.5123  7.66%  15.44%  0.4962
    # 0  FF3F         1  1.31%   1.5722  7.69%  15.19%   0.506
    # 0  CAPM         1  1.41%    1.667  7.69%  15.19%   0.506
    # 0  FF3F         2  1.41%   1.8999  8.08%  15.64%  0.5163
    # 0  CAPM         2  1.56%   2.0172  8.08%  15.64%  0.5163
    # 0  FF3F         3  3.34%   3.7504  10.1%  15.97%  0.6325
    # 0  CAPM         3  3.52%    3.831  10.1%  15.97%  0.6325
    # 0  FF3F         4  4.34%   4.5655 10.91%  16.03%  0.6808
    # 0  CAPM         4  4.32%   4.5679 10.91%  16.03%  0.6808
    # 0  FF3F      L4S0  3.22%   2.3094  3.25%   6.48%  0.5019
    # 0  CAPM      L4S0  3.05%   2.1854  3.25%   6.48%  0.5019
    # 0  Market                          8.63%  15.43%  0.5593
# 0 Market 8.63% 15.43% 0.5593<file_sep>import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm
import math
import os
from PetriProgramming.PortfolioHelper import plot_cumulative_returns, summary_statistics
pd.set_option('display.max_columns', 80)
pd.set_option('display.max_rows', 100)
pd.set_option('display.min_rows', 50)
pd.options.display.width = 0
def get_data2():
    """Build a daily panel linking quarterly filing-similarity scores to CRSP returns.

    Reads every per-period similarity file from disk, joins it to CRSP returns
    through the CUSIP->CIK link table (matching on exact date and 8-digit
    CUSIP), forward-fills each firm's latest similarity, and drops rows with
    missing return/price/shares data.

    :return: DataFrame with one row per PERMNO/date, carrying identifiers,
             price, return, shares outstanding, CIK and similarity
    """
    _, _, directories = next(os.walk('../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Quarterly'))
    # _, _, directories = next(os.walk('../EntireFile/Cosine/CountVectorizer/Quarterly'))
    # Read all similarity files, then concatenate once at the end.
    # (DataFrame.append was removed in pandas 2.0 and was O(n^2) in a loop.)
    frames = [
        pd.read_csv(f'../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Quarterly/{d}',
                    header=None, names=['date', 'cik', 'similarity'])
        for d in directories
    ]
    similarity = pd.concat(frames) if frames else pd.DataFrame(columns=['date', 'cik', 'similarity'])
    # similarity = pd.read_csv('../EntireFile/Cosine/CountVectorizer/Quarterly', index_col=0)
    similarity['date'] = pd.to_datetime(similarity['date'], format='%Y%m%d')
    returns = pd.read_csv('../../Data/Returns-CRSP.csv')
    returns['date'] = pd.to_datetime(returns['date'], format='%Y%m%d')
    # Take price magnitudes (presumably negative CRSP prices flag quote
    # midpoints — confirm against the CRSP documentation).
    returns['PRC'] = returns['PRC'].abs()
    returns['RET'] = pd.to_numeric(returns['RET'], errors='coerce')
    CIK_link = pd.read_csv('../../Data/CIK_Link.csv')
    # First 8 CUSIP characters to match CRSP's 8-digit CUSIP column.
    CIK_link['merge_CUSIP'] = CIK_link['cusip'].apply(lambda x: str(x)[:8])
    CIK_link['datadate'] = pd.to_datetime(CIK_link['datadate'], format='%Y%m%d')
    # Don't merge on date, too?
    CIK_merged = pd.merge(returns, CIK_link, how='left', left_on=['date', 'CUSIP'], right_on=['datadate', 'merge_CUSIP'])
    df_all = pd.merge(CIK_merged, similarity, how='left', on=['cik', 'date'])
    # Carry each firm's latest similarity forward until a new filing appears.
    df_all['similarity'] = df_all.groupby('PERMNO')['similarity'].ffill()
    df_all = df_all[~df_all['similarity'].isna()]
    df_all = df_all[~df_all['RET'].isna()]
    df_all = df_all[(~df_all['PRC'].isna()) & (~df_all['SHROUT'].isna())]
    df_all = df_all[['PERMNO', 'date', 'SHRCD', 'EXCHCD', 'SICCD', 'TICKER', 'COMNAM', 'SHRCLS',
                     'CUSIP', 'PRC', 'RET', 'SHROUT', 'cik', 'similarity']]
    return df_all
def get_data3():
    """Build a monthly panel linking filing-similarity scores to CRSP returns.

    Unlike :func:`get_data2`, the CUSIP->CIK link and the similarity scores are
    matched on year/month rather than exact date, and data from 2019 onward is
    dropped. Each firm's latest similarity is forward-filled between filings.

    :return: DataFrame with one row per PERMNO/date, carrying identifiers,
             price, return, shares outstanding, CIK and similarity
    """
    # NOTE(review): the directory listing comes from the
    # CountVectorizerBinary path but the files are READ from the plain
    # CountVectorizer path below — the two runs must contain identically
    # named files for this to work. Confirm the mismatch is intentional.
    _, _, directories = next(os.walk('../EntireFilePreviousYear/Cosine/CountVectorizerBinary/Quarterly'))
    # _, _, directories = next(os.walk('../EntireFile/Cosine/CountVectorizer/Quarterly'))
    # Read all similarity files, then concatenate once at the end.
    # (DataFrame.append was removed in pandas 2.0 and was O(n^2) in a loop.)
    frames = [
        pd.read_csv(f'../EntireFile/Cosine/CountVectorizer/Quarterly/{d}', header=None,
                    names=['date', 'cik', 'similarity'])
        for d in directories
    ]
    similarity = pd.concat(frames) if frames else pd.DataFrame(columns=['date', 'cik', 'similarity'])
    # similarity = pd.read_csv('../EntireFile/Cosine/CountVectorizer/Quarterly', index_col=0)
    similarity['date'] = pd.to_datetime(similarity['date'], format='%Y%m%d')
    similarity['month'] = similarity['date'].dt.month
    similarity['year'] = similarity['date'].dt.year
    similarity = similarity.rename(columns={'date': 'filing_date'})
    returns = pd.read_csv('../../Data/Returns-CRSP.csv')
    returns['date'] = pd.to_datetime(returns['date'], format='%Y%m%d')
    # Take price magnitudes (presumably negative CRSP prices flag quote
    # midpoints — confirm against the CRSP documentation).
    returns['PRC'] = returns['PRC'].abs()
    returns['RET'] = pd.to_numeric(returns['RET'], errors='coerce')
    returns['year'] = returns['date'].dt.year
    returns['month'] = returns['date'].dt.month
    CIK_link = pd.read_csv('../../Data/CIK_Link.csv')
    # First 8 CUSIP characters to match CRSP's 8-digit CUSIP column.
    CIK_link['merge_CUSIP'] = CIK_link['cusip'].apply(lambda x: str(x)[:8])
    CIK_link['datadate'] = pd.to_datetime(CIK_link['datadate'], format='%Y%m%d')
    CIK_link['month'] = CIK_link['datadate'].dt.month
    CIK_link['year'] = CIK_link['datadate'].dt.year
    CIK_merged = pd.merge(returns, CIK_link, how='left', left_on=['year', 'month', 'CUSIP'],
                          right_on=['year', 'month', 'merge_CUSIP'])
    # print(CIK_merged)
    df_all = pd.merge(CIK_merged, similarity, how='left', on=['cik', 'year', 'month'])
    df_all = df_all[df_all['year'] < 2019]
    # df_all['new_filing'] = df_all['similarity'].apply(lambda x: 0 if pd.isna(x) else 1)
    #
    # Carry each firm's latest similarity forward until a new filing appears.
    df_all['similarity'] = df_all.groupby('PERMNO')['similarity'].ffill()
    df_all = df_all[~df_all['similarity'].isna()]
    # print(df_all)
    df_all = df_all[~df_all['RET'].isna()]
    df_all = df_all[(~df_all['PRC'].isna()) & (~df_all['SHROUT'].isna())]
    df_all = df_all[['PERMNO', 'date', 'SHRCD', 'EXCHCD', 'SICCD', 'TICKER', 'COMNAM', 'SHRCLS',
                     'CUSIP', 'PRC', 'RET', 'SHROUT', 'cik', 'similarity']]
    return df_all
def get_portfolio_returns2(n_portfolios, factor_column, df):
    """
    Value-weighted portfolios reconstituted ANNUALLY: sorts on ``factor_column``
    at each December-end, fixes market-cap weights there, and applies them to
    all of the following year's returns.
    :param n_portfolios: number of portfolios for sorting
    :param factor_column: column that we are sorting on
    :param df: dataframe of data (mutated in place: date parts, 'random' and
               'market_cap' columns are added)
    :return: returns monthly returns for all portfolios, with FF3F factors merged in
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # NOTE(review): a uniform placebo column is added here but never used in
    # this function — presumably left over from a randomization check.
    df['random'] = np.random.rand(df.shape[0])
    # df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.9 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # End-of-year snapshot: December observations only.
    df_eoy = df[df['month'] == 12]
    df_eoy = df_eoy[['PERMNO', 'date', 'year', 'month', factor_column, 'market_cap']]
    # Quantile sort within each year; labels are portfolio ids 0..n-1.
    df_eoy['group'] = df_eoy.groupby(['year'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights: each stock's share of its portfolio's total market cap.
    eoy_group_market_cap = df_eoy.groupby(['group', 'year'])['market_cap'].sum().reset_index()
    eoy_group_market_cap = eoy_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eoy = pd.merge(df_eoy, eoy_group_market_cap, how='left', on=['group', 'year'])
    df_eoy['weight'] = df_eoy['market_cap'] / df_eoy['group_market_cap']
    # Merge the group formed at year-end Y onto returns during year Y+1.
    df_eoy['merge_year'] = df_eoy['year'] + 1
    df_eoy = df_eoy[['merge_year', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eoy, how='left', left_on=['PERMNO', 'year'], right_on=['PERMNO', 'merge_year'])
    df = df[~df['weight'].isna()]
    df = df[~df['group'].isna()]
    # Diagnostic: plot per-portfolio stock counts over time.
    a = df.groupby(['group', 'date'])['PERMNO'].count()
    for i in range(n_portfolios):
        plt.plot(a[i])
    plt.show()
    # Value-weighted portfolio return: sum of weight * stock return per group/date.
    df['weighted_return'] = df['weight'] * df['RET']
    # df.to_csv('wtf.csv')
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # TODO: Actual return here will be messed up.... fix later! ????
    # TODO: Rename columns and stuff
    # TODO: Add Long-Short
    # TODO: Why is weight above 1?
    # FF3F factors come as percentages; convert to decimal returns.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F monthly dates to month-end timestamps before merging.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def get_portfolio_quarterly(n_portfolios, factor_column, df):
    """
    Value-weighted portfolios reconstituted QUARTERLY: sorts on
    ``factor_column`` at each quarter-end, fixes market-cap weights there, and
    applies them to the following quarter's returns.
    (Docstring corrected: the previous text described the annual variant.)
    :param n_portfolios: number of portfolios for sorting
    :param factor_column: column that we are sorting on
    :param df: dataframe of data (mutated in place: date parts and
               'market_cap' columns are added)
    :return: returns monthly returns for all portfolios, with FF3F factors merged in
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Gets rid of some multiple stuff...
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # df['random'] = np.random.rand(df.shape[0])
    # df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.9 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where we have more data...
    df = df[df['year'] > 1996]
    # Diagnostic: number of stocks per date.
    df.groupby('date')['PERMNO'].count().plot()
    plt.show()
    # End-of-quarter snapshot: last observation per PERMNO per quarter.
    df_eoq = df.drop_duplicates(subset=['PERMNO', 'year', 'quarter'], keep='last')
    df_eoq = df_eoq[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    # Quantile sort within each year/quarter; labels are portfolio ids 0..n-1.
    df_eoq['group'] = df_eoq.groupby(['year', 'quarter'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights: each stock's share of its portfolio's total market cap.
    eoq_group_market_cap = df_eoq.groupby(['group', 'year', 'quarter'])['market_cap'].sum().reset_index()
    eoq_group_market_cap = eoq_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eoq = pd.merge(df_eoq, eoq_group_market_cap, how='left', on=['group', 'year', 'quarter'])
    df_eoq['weight'] = df_eoq['market_cap'] / df_eoq['group_market_cap']
    # Merge the group formed in quarter Q onto returns of quarter Q+1
    # (Q4 rolls into Q1 of the next year).
    df_eoq['merge_year'] = df_eoq.apply(lambda x: x['year'] if x['quarter'] != 4 else x['year'] + 1, axis=1)
    df_eoq['merge_quarter'] = df_eoq['quarter'].apply(lambda x: x + 1 if x != 4 else 1)
    df_eoq = df_eoq[['merge_year', 'merge_quarter', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eoq, how='left', left_on=['PERMNO', 'year', 'quarter'], right_on=['PERMNO', 'merge_year', 'merge_quarter'])
    # df = df[~df['weight'].isna()]
    # df = df[~df['group'].isna()]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # Validate sorting methodology
    # a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    #
    # a = df.groupby(['group', 'date'])['weight'].sum().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    # Value-weighted portfolio return: sum of weight * stock return per group/date.
    df['weighted_return'] = df['weight'] * df['RET']
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # FF3F factors come as percentages; convert to decimal returns.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F monthly dates to month-end timestamps before merging.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
def get_portfolio_monthly(n_portfolios, factor_column, df):
    """Value-weighted factor portfolios merged with FF3F factor returns.

    NOTE(review): despite the name, this body is identical to
    ``get_portfolio_quarterly`` above — it snapshots, sorts and reweights at
    QUARTER ends and rolls memberships forward one quarter. There is no
    monthly reconstitution here; confirm whether the name or the logic is
    what was intended.

    :param n_portfolios: number of quantile portfolios for the sort
    :param factor_column: column of ``df`` to sort on
    :param df: CRSP-style panel (mutated in place: date parts and
               'market_cap' columns are added)
    :return: wide DataFrame of portfolio returns plus FF3F factor columns
    """
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df['year'] = df['date'].dt.year
    df['month'] = df['date'].dt.month
    df['quarter'] = df['date'].dt.quarter
    # Gets rid of some multiple stuff...
    df = df.drop_duplicates(subset=['PERMNO', 'date'])
    # df['random'] = np.random.rand(df.shape[0])
    # df['similarity'] = df['similarity'].apply(lambda x: np.nan if x < 0.9 else x)
    # df['similarity'] = df.groupby('PERMNO')['similarity'].ffill()
    # df = df[~df['similarity'].isna()]
    df['market_cap'] = df['PRC'] * df['SHROUT']
    # Start where we have more data...
    df = df[df['year'] > 1996]
    # Diagnostic: number of stocks per date.
    df.groupby('date')['PERMNO'].count().plot()
    plt.show()
    # End-of-quarter snapshot: last observation per PERMNO per quarter.
    df_eoq = df.drop_duplicates(subset=['PERMNO', 'year', 'quarter'], keep='last')
    df_eoq = df_eoq[['PERMNO', 'date', 'year', 'month', 'quarter', factor_column, 'market_cap']]
    # Quantile sort within each year/quarter; labels are portfolio ids 0..n-1.
    df_eoq['group'] = df_eoq.groupby(['year', 'quarter'])[factor_column].transform(
        lambda x: pd.qcut(x, n_portfolios, range(n_portfolios)))
    # Value weights: each stock's share of its portfolio's total market cap.
    eoq_group_market_cap = df_eoq.groupby(['group', 'year', 'quarter'])['market_cap'].sum().reset_index()
    eoq_group_market_cap = eoq_group_market_cap.rename(columns={'market_cap': 'group_market_cap'})
    df_eoq = pd.merge(df_eoq, eoq_group_market_cap, how='left', on=['group', 'year', 'quarter'])
    df_eoq['weight'] = df_eoq['market_cap'] / df_eoq['group_market_cap']
    # Merge the group formed in quarter Q onto returns of quarter Q+1
    # (Q4 rolls into Q1 of the next year).
    df_eoq['merge_year'] = df_eoq.apply(lambda x: x['year'] if x['quarter'] != 4 else x['year'] + 1, axis=1)
    df_eoq['merge_quarter'] = df_eoq['quarter'].apply(lambda x: x + 1 if x != 4 else 1)
    df_eoq = df_eoq[['merge_year', 'merge_quarter', 'PERMNO', 'group', 'group_market_cap', 'weight']]
    df = pd.merge(df, df_eoq, how='left', left_on=['PERMNO', 'year', 'quarter'],
                  right_on=['PERMNO', 'merge_year', 'merge_quarter'])
    # df = df[~df['weight'].isna()]
    # df = df[~df['group'].isna()]
    # df.groupby('date')['PERMNO'].count().plot()
    # plt.show()
    # Validate sorting methodology
    # a = df.groupby(['group', 'date'])['PERMNO'].count().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    #
    # a = df.groupby(['group', 'date'])['weight'].sum().unstack().T
    # for i in range(n_portfolios):
    #     plt.plot(a[i], label=f'Portfolio {i}')
    # plt.show()
    # Value-weighted portfolio return: sum of weight * stock return per group/date.
    df['weighted_return'] = df['weight'] * df['RET']
    portfolios = df.groupby(['group', 'date'])['weighted_return'].sum().unstack().T.reset_index()
    # FF3F factors come as percentages; convert to decimal returns.
    ff3f = pd.read_csv('../../Data/FF3F.csv')
    ff3f['Mkt-RF'] = ff3f['Mkt-RF'] / 100
    ff3f['SMB'] = ff3f['SMB'] / 100
    ff3f['HML'] = ff3f['HML'] / 100
    ff3f['RF'] = ff3f['RF'] / 100
    # Align FF3F monthly dates to month-end timestamps before merging.
    ff3f['Date'] = pd.to_datetime(ff3f['Date'], format='%Y%m').dt.to_period('M').dt.to_timestamp('M')
    ff3f = ff3f.rename(columns={'Date': 'date'})
    portfolios['year'] = portfolios['date'].dt.year
    portfolios['month'] = portfolios['date'].dt.month
    ff3f['year'] = ff3f['date'].dt.year
    ff3f['month'] = ff3f['date'].dt.month
    ff3f = ff3f.drop(columns=['date'])
    portfolios = pd.merge(portfolios, ff3f, how='left', on=['year', 'month'])
    portfolios = portfolios.drop(columns=['year', 'month'])
    return portfolios
if __name__ == "__main__":
    # Build the monthly similarity/returns panel, then run a quintile
    # quarterly sort and report cumulative returns and summary statistics.
    d = get_data3()
    # d.to_csv('entireSimilarityData.csv')
    # d = pd.read_csv('../entireSimilarityData.csv', index_col=0)
    ports = get_portfolio_quarterly(5, 'similarity', d)
    plot_cumulative_returns(ports, 5)
    summary_statistics(ports, 5)
<file_sep>import os
import re
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import plot_roc_curve, plot_confusion_matrix
from sklearn.model_selection import train_test_split
import numpy as np
import matplotlib.pyplot as plt
import shap
from sklearn.model_selection import cross_validate
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers
import tensorflow.keras as keras
from sklearn.utils import class_weight
from sklearn.metrics import confusion_matrix
import seaborn as sns
# Root folder containing the sample SEC filings to label/classify.
# NOTE(review): this is a POSIX-style absolute path while the alternatives
# below are Windows paths, and file paths are later built with '\\' —
# confirm which environment this is meant to run on.
PATH = "/Data/10-Q Sample"
# PATH = "D:\\Python\\Projects\\FinTech\\SEC-Analytics\\Data\\10-Q Sample"
# PATH = "C:\\Users\\jpetr\\PycharmProjects\\SEC-Analytics\\Data\\10-K Sample"
# MD&A section boundaries: 10-K filings span "Item 7." to "Item 8.",
# 10-Q filings span "Item 2." to "Item 3.".
REGEX_10K = r"(Item[\s]+?7\.[\s\S]*?)(Item[\s]+?8\.)"
REGEX_10Q = r"(Item[\s]+?2\.[\s\S]*?)(Item[\s]+?3\.)"
# Pandas console display settings for interactive inspection.
pd.options.display.width = 0
# pd.set_printoptions(max_rows=200, max_columns=10)
pd.set_option('display.max_columns', 10)
pd.set_option('display.max_rows', 100)
def get_supervised(new_file_name):
    """Interactively label the MD&A section boundaries of sample filings.

    For each filing under ``PATH`` not yet listed in QuarterlySupervised.csv,
    the regex-guessed open/close 'item' indices are shown, the annotated text
    is opened for inspection (Windows ``os.startfile``), and the user confirms
    or overrides the indices. Labels are appended to ``new_file_name`` as
    ``file,open,close`` lines.

    :param new_file_name: CSV file to append confirmed labels to
    """
    # Whole label file as one text blob; substring membership below is how
    # already-labeled filings are skipped. (A dead ``already_done = []``
    # assignment that was immediately overwritten has been removed.)
    with open('QuarterlySupervised.csv') as labels_file:
        already_done = labels_file.read()
    _, _, file_names = next(
        os.walk(PATH))
    for file_name in file_names:
        if file_name in already_done:
            print('file already done!')
            continue
        file_path = f'{PATH}\\{file_name}'
        with open(file_path) as filing:
            file_text = filing.read()
        match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
        # Offsets of every case-insensitive 'item' occurrence in the filing.
        items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        opening_index = None
        closing_index = None
        try:
            # Last regex match is the guessed MD&A text; any failure
            # (e.g. no match -> IndexError) is reported and labeling
            # falls through to manual input below.
            mda = match[-1][0]
            # Annotate the filing with ' item $%$ <n> $%$' markers so the
            # user can read off item indices while reviewing.
            modified_file_text = ''.join(
                f'{s} item $%$ {index} $%$'
                for index, s in enumerate(re.split(r'item', file_text, flags=re.IGNORECASE)))
            for index, item in enumerate(items):
                # Opening index: the 'item' where the guessed MD&A starts;
                # closing index: the 'item' occurrence nearest to its end.
                if file_text[item: item + len(mda)] == mda:
                    opening_index = index
                    closing_index = min(range(len(items)), key=lambda i: abs(items[i] - (item + len(mda))))
            with open('modified.txt', 'w') as modified_file:
                modified_file.write(modified_file_text)
            os.startfile('modified.txt')
            # os.startfile(file_path)
            with open('output.txt', 'w') as output_file:
                output_file.write(mda)
            os.startfile('output.txt')
        except Exception as e:
            print(e, file_name)
        print(f'\n{file_name}\nOpening: {opening_index}\nClosing: {closing_index}')
        # Empty input keeps the regex-derived guess.
        actual_open = input('What is the open index? :')
        actual_close = input('What is the close index? :')
        if actual_open == '':
            actual_open = opening_index
        if actual_close == '':
            actual_close = closing_index
        with open(new_file_name, 'a') as supervised_file:
            supervised_file.write(f'{file_name},{actual_open},{actual_close}\n')
def data_mine(X_train, X_test, y_train, y_test):
    """Fit a battery of sklearn classifiers and report accuracy/log-loss.

    Each classifier is trained on the train split, scored on the test split,
    its metrics printed and collected, and its ROC curve plotted.

    :param X_train: training features
    :param X_test: test features
    :param y_train: training labels
    :param y_test: test labels
    """
    from sklearn.metrics import accuracy_score, log_loss
    from sklearn.neighbors import KNeighborsClassifier
    from sklearn.svm import SVC, LinearSVC, NuSVC
    from sklearn.tree import DecisionTreeClassifier
    from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
    from sklearn.naive_bayes import GaussianNB
    from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
    from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
    classifiers = [
        KNeighborsClassifier(3),
        SVC(kernel="rbf", C=0.025, probability=True),
        # NuSVC(probability=True),
        DecisionTreeClassifier(),
        RandomForestClassifier(),
        AdaBoostClassifier(),
        GradientBoostingClassifier(),
        GaussianNB(),
        LinearDiscriminantAnalysis(),
        # QuadraticDiscriminantAnalysis()
    ]
    # Logging for Visual Comparison
    log_cols = ["Classifier", "Accuracy", "Log Loss"]
    log = pd.DataFrame(columns=log_cols)
    for clf in classifiers:
        clf.fit(X_train, y_train)
        name = clf.__class__.__name__
        print("=" * 30)
        print(name)
        print('****Results****')
        train_predictions = clf.predict(X_test)
        acc = accuracy_score(y_test, train_predictions)
        print("Accuracy: {:.4%}".format(acc))
        train_predictions = clf.predict_proba(X_test)
        ll = log_loss(y_test, train_predictions)
        print("Log Loss: {}".format(ll))
        log_entry = pd.DataFrame([[name, acc * 100, ll]], columns=log_cols)
        # Fix: DataFrame.append is removed in pandas >= 2.0; concatenate instead.
        log = pd.concat([log, log_entry])
        # NOTE(review): plot_roc_curve was removed in scikit-learn 1.2 —
        # migrate to RocCurveDisplay.from_estimator when upgrading.
        plot_roc_curve(clf, X_test, y_test)
        plt.show()
    print("=" * 30)
def test():
    """Train and inspect a classifier that finds the MD&A opening 'item' token.

    Builds one feature row per 'item' occurrence in each labeled filing
    (labels come from QuarterlySupervised.csv), fits a gradient-boosted
    classifier on the open-token label, prints confusion matrices for the
    regex baseline and the model, shows SHAP feature attributions, and then
    opens each misclassified filing for manual review.

    Side effects: writes quarterlyClassification.csv, opens files via
    ``os.startfile`` (Windows), shows matplotlib/shap figures, and blocks on
    ``input()`` during review.
    """
    df = pd.read_csv('QuarterlySupervised.csv')
    # Drop filings whose open/close index could not be labeled.
    df = df[df['open'] != 'None']
    df = df[df['close'] != 'None']
    feature_columns = ['file', 'true_location_open', 'true_location_close', 'input_location', 'position',
                       'trailing_management', 'trailing_period', 'trailing_7', 'y_open', 'y_close',
                       'trailing_newline', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces',
                       'regex_open', 'regex_close', 'trailing_financial', 'trailing_7A', 'leading_words',
                       'leading_see', 'leading_text', 'leading_double_newline', 'is_uppercase',
                       'trailing_omission', 'leading_with', 'leading_table_of_contents',
                       'leading_newline_count', 'trailing_analysis', 'trailing_8']
    # Collect rows in a plain list and build the frame once at the end:
    # per-row DataFrame.append was O(n^2) and is removed in pandas >= 2.0.
    rows = []
    for _, row in df.iterrows():
        with open(f'{PATH}\\{row.file}') as filing:
            file_text = filing.read()
        # Offsets of every case-insensitive 'item' occurrence; the labeled
        # open/close values index into this list.
        items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        close_items = [0] * len(items)
        close_items[int(row.close)] = 1
        open_items = [0] * len(items)
        open_items[int(row.open)] = 1
        y_close = close_items
        y_open = open_items
        total_text_length = len(file_text)
        # NOTE(review): this file labels 10-Q data elsewhere, but the regex
        # baseline here uses the 10-K pattern — confirm that is intended.
        match = re.findall(REGEX_10K, file_text, re.IGNORECASE)
        for index, item in enumerate(items):
            # Hand-crafted context features around each 'item' occurrence;
            # the window sizes mirror the original feature definitions.
            trailing_management = int('management' in file_text[item: item + 50].lower())
            trailing_analysis = int('analysis' in file_text[item: item + 80].lower())
            trailing_period = int('.' in file_text[item: item + 20])
            trailing_7 = int('7' in file_text[item: item + 20])
            trailing_8 = int('8' in file_text[item: item + 20])
            trailing_newline = int('\n' in file_text[item: item + 100])
            trailing_7A = int('7a' in file_text[item: item + 15].lower())
            is_uppercase = int(file_text[item: item + 15].isupper())
            leading_newline = int('\n' in file_text[item - 5: item])
            leading_double_newline = int('\n\n' in file_text[item - 5: item])
            leading_tab = int('\t' in file_text[item - 5: item])
            leading_spaces = int(' ' in file_text[item - 5: item])
            # 'Running prose' heuristic: several words of average length > 2
            # immediately before the token.
            leading_chunk = file_text[item - 40: item].split(' ')
            leading_words = int(len(leading_chunk) > 2
                                and (sum(len(w) for w in leading_chunk) / len(leading_chunk)) > 2)
            leading_see = int('see' in file_text[item - 10: item].lower())
            leading_with = int('with' in file_text[item - 10: item].lower())
            leading_text = int(len(re.findall(r'\w+', file_text[item - 5: item])) > 0)
            trailing_financial = int('financial' in file_text[item: item + 30].lower())
            trailing_omission = int('applicable' in file_text[item: item + 300].lower()
                                    or 'omitted' in file_text[item: item + 300].lower())
            leading_table_of_contents = int('table of contents' in file_text[item - 50: item].lower())
            leading_newline_count = len(file_text[item - 20: item].split('\n'))
            # Regex baseline: does the guessed MD&A start (end) at this token?
            regex_open = 0
            regex_close = 0
            try:
                mda = match[-1][0]
                if file_text[item: item + len(mda)] == mda:
                    regex_open = 1
            except IndexError:
                pass  # regex found no MD&A section in this filing
            try:
                mda = match[-1][0]
                if file_text[item - len(mda): item] == mda:
                    regex_close = 1
            except IndexError:
                pass
            rows.append(
                {
                    'file': row.file,
                    'position': item / total_text_length,
                    'y_open': y_open[index],
                    'trailing_management': trailing_management,
                    'y_close': y_close[index],
                    'trailing_period': trailing_period,
                    'trailing_7': trailing_7,
                    'trailing_newline': trailing_newline,
                    'leading_newline': leading_newline,
                    'total_size': total_text_length,
                    'leading_tab': leading_tab,
                    'leading_spaces': leading_spaces,
                    'regex_open': regex_open,
                    'regex_close': regex_close,
                    'trailing_financial': trailing_financial,
                    'true_location_open': row.open,
                    'true_location_close': row.close,
                    'input_location': index,
                    'trailing_7A': trailing_7A,
                    'leading_words': leading_words,
                    'leading_see': leading_see,
                    'leading_text': leading_text,
                    'leading_double_newline': leading_double_newline,
                    'is_uppercase': is_uppercase,
                    'trailing_omission': trailing_omission,
                    'leading_with': leading_with,
                    'leading_table_of_contents': leading_table_of_contents,
                    'leading_newline_count': leading_newline_count,
                    'trailing_analysis': trailing_analysis,
                    'trailing_8': trailing_8
                })
    data = pd.DataFrame(rows, columns=feature_columns)
    data.to_csv('quarterlyClassification.csv')
    logistic = GradientBoostingClassifier(max_depth=12, random_state=699)
    X = data[['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline',
              'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial',
              'trailing_7A', 'regex_open', 'leading_see', 'leading_text', 'leading_double_newline',
              'is_uppercase', 'trailing_omission', 'leading_table_of_contents', 'leading_newline_count',
              'trailing_analysis']]
    y = data['y_open'].astype(int)

    def correct(a, b):
        # Elementwise agreement indicator: 1 where prediction equals label.
        return [1 if pred == lab else 0 for pred, lab in zip(a, b)]

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    logistic.fit(X_train, y_train)
    # Regex-baseline confusion matrices on test and train splits.
    # NOTE(review): these compare the labels against the AGREEMENT indicator,
    # not against the regex predictions themselves — confirm intended.
    cm = confusion_matrix(y_test, correct(list(X_test['regex_open']), list(y_test)))
    print(cm)
    cm = confusion_matrix(y_train, correct(list(X_train['regex_open']), list(y_train)))
    print(cm)
    plot_confusion_matrix(logistic, X_train, y_train)
    plt.show()
    plot_confusion_matrix(logistic, X_test, y_test)
    plt.show()
    # SHAP attributions for the fitted tree ensemble.
    explainer = shap.TreeExplainer(logistic)
    shap_values = explainer.shap_values(X)
    shap.summary_plot(shap_values, X)
    # Fix: the review loop below reads data['predicted'], but the line that
    # assigned it had been commented out, so the loop raised KeyError.
    data['predicted'] = logistic.predict(X)
    # Open each misclassified filing for manual inspection.
    for index, row in data[~(data['predicted'] == data['y_open'])].iterrows():
        os.startfile(f'{PATH}\\{row.file}')
        print(row)
        input('continue?')
def test_quarterly():
    """Train and inspect an MD&A-opening classifier for quarterly (10-Q) filings.

    Pipeline:
      1. Read the hand-labelled index 'QuarterlySupervised.csv' (columns
         ``file``, ``open``, ``close`` -- the indices, among all occurrences
         of the token 'item', of the true MD&A opening/closing boundaries)
         and drop unlabelled rows.
      2. For every occurrence of 'item' in each filing, extract lexical
         context features (surrounding tokens, whitespace shape, document
         position, regex-baseline hits).
      3. Persist the feature table to 'quarterlyClassification.csv'.
      4. Fit a GradientBoostingClassifier, print the regex-baseline
         confusion matrices, plot the model's train/test confusion matrices
         and a SHAP feature-importance summary.
      5. Open every misclassified filing for manual review.

    Relies on module-level ``PATH`` (filings directory) and ``REGEX_10Q``
    (baseline MD&A-boundary pattern).
    """
    df = pd.read_csv('QuarterlySupervised.csv')
    df = df[df['open'] != 'None']
    df = df[df['close'] != 'None']
    columns = ['file', 'true_location_open', 'true_location_close', 'input_location', 'position',
               'trailing_management', 'trailing_period', 'trailing_2', 'y_open', 'y_close',
               'trailing_newline', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces',
               'regex_open', 'regex_close', 'trailing_financial', 'leading_words', 'leading_see',
               'leading_text', 'leading_double_newline', 'is_uppercase', 'trailing_omission',
               'leading_with', 'leading_table_of_contents', 'leading_newline_count',
               'trailing_analysis', 'trailing_3', 'trailing_quantitative']
    # Accumulate rows in a plain list: per-row DataFrame.append is quadratic
    # and was removed in pandas 2.0.
    rows = []
    for _, row in df.iterrows():
        file_text = open(f'{PATH}\\{row.file}').read()
        # Every case-insensitive occurrence of 'item' is a candidate boundary.
        items = [m.start() for m in re.finditer('item', file_text, re.IGNORECASE)]
        # One-hot label vectors over the candidates.
        open_items = [0] * len(items)
        open_items[int(row.open)] = 1
        close_items = [0] * len(items)
        close_items[int(row.close)] = 1
        total_text_length = len(file_text)
        # Regex baseline: all matches of the MD&A boundary pattern; the last
        # match (if any) is compared against each candidate below.
        match = re.findall(REGEX_10Q, file_text, re.IGNORECASE)
        for index, item in enumerate(items):
            trailing_80 = file_text[item: item + 80].lower()
            trailing_management = int('management' in trailing_80)
            trailing_quantitative = int('quantitative' in trailing_80)
            trailing_analysis = int('analysis' in trailing_80)
            trailing_period = int('.' in file_text[item: item + 20])
            trailing_2 = int('2' in file_text[item: item + 20])
            trailing_3 = int('3' in file_text[item: item + 20])
            trailing_newline = int('\n' in file_text[item: item + 100])
            is_uppercase = int(file_text[item: item + 15].isupper())
            leading_5 = file_text[item - 5: item]
            leading_newline = int('\n' in leading_5)
            leading_double_newline = int('\n\n' in leading_5)
            leading_tab = int('\t' in leading_5)
            leading_spaces = int(' ' in leading_5)
            # "Real words" precede the token: several space-separated chunks
            # with average length > 2 in the 40 chars before it.
            window_40 = file_text[item - 40: item].split(' ')
            leading_words = int(len(window_40) > 2 and
                                (sum(len(w) for w in window_40) / len(window_40)) > 2)
            leading_see = int('see' in file_text[item - 10: item].lower())
            leading_with = int('with' in file_text[item - 10: item].lower())
            leading_text = int(len(re.findall(r'\w+', leading_5)) > 0)
            trailing_financial = int('financial' in file_text[item: item + 30].lower())
            trailing_300 = file_text[item: item + 300].lower()
            trailing_omission = int('applicable' in trailing_300 or 'omitted' in trailing_300)
            leading_table_of_contents = int('table of contents' in file_text[item - 50: item].lower())
            leading_newline_count = len(file_text[item - 20: item].split('\n'))
            # Does the last regex match start (open) / end (close) exactly here?
            regex_open = 0
            regex_close = 0
            try:
                mda = match[-1][0]
                if file_text[item: item + len(mda)] == mda:
                    regex_open = 1
            except IndexError:
                pass  # no regex match in this filing
            try:
                mda = match[-1][0]
                if file_text[item - len(mda): item] == mda:
                    regex_close = 1
            except IndexError:
                pass  # no regex match in this filing
            rows.append({
                'file': row.file,
                'true_location_open': row.open,
                'true_location_close': row.close,
                'input_location': index,
                'position': item / total_text_length,
                'total_size': total_text_length,
                'y_open': open_items[index],
                'y_close': close_items[index],
                'trailing_management': trailing_management,
                'trailing_period': trailing_period,
                'trailing_2': trailing_2,
                'trailing_3': trailing_3,
                'trailing_quantitative': trailing_quantitative,
                'trailing_analysis': trailing_analysis,
                'trailing_newline': trailing_newline,
                'trailing_financial': trailing_financial,
                'trailing_omission': trailing_omission,
                'leading_newline': leading_newline,
                'leading_double_newline': leading_double_newline,
                'leading_tab': leading_tab,
                'leading_spaces': leading_spaces,
                'leading_words': leading_words,
                'leading_see': leading_see,
                'leading_with': leading_with,
                'leading_text': leading_text,
                'leading_table_of_contents': leading_table_of_contents,
                'leading_newline_count': leading_newline_count,
                'is_uppercase': is_uppercase,
                'regex_open': regex_open,
                'regex_close': regex_close,
            })
    data = pd.DataFrame(rows, columns=columns)
    data.to_csv('quarterlyClassification.csv')
    logistic = GradientBoostingClassifier(max_depth=12, random_state=699)
    # BUG FIX: the previous feature list referenced annual-report columns
    # ('trailing_7', 'trailing_7A') that are never produced for quarterly
    # filings and raised a KeyError; use the quarterly feature set instead
    # (same columns as open_train_quarterly()).
    X = data[['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline',
              'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial',
              'regex_open', 'leading_see', 'leading_text', 'leading_double_newline', 'is_uppercase',
              'trailing_omission', 'leading_table_of_contents', 'leading_newline_count',
              'trailing_analysis']]
    y = data['y_open'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=699, shuffle=True)
    logistic.fit(X_train, y_train)
    # Regex-baseline confusion matrices (label vs. baseline prediction),
    # computed the same way as in open_train_quarterly(). The previous code
    # compared the labels against an agreement indicator, which produced a
    # meaningless matrix.
    print(confusion_matrix(y_test, list(X_test['regex_open'])))
    print(confusion_matrix(y_train, list(X_train['regex_open'])))
    # Model confusion matrices.
    plot_confusion_matrix(logistic, X_train, y_train)
    plt.show()
    plot_confusion_matrix(logistic, X_test, y_test)
    plt.show()
    # Feature attribution over the full data set.
    explainer = shap.TreeExplainer(logistic)
    shap_values = explainer.shap_values(X)
    shap.summary_plot(shap_values, X)
    # BUG FIX: 'predicted' was never assigned (the assignment had been
    # commented out) although the loop below reads it, raising a KeyError.
    data['predicted'] = logistic.predict(X)
    # Open every misclassified filing for manual inspection.
    for _, bad in data[~(data['predicted'] == data['y_open'])].iterrows():
        os.startfile(f'{PATH}\\{bad.file}')
        print(bad)
        input('continue?')
def selection(df):
    """Cross-validate a balanced random forest on the MD&A-opening features.

    Runs 5-fold cross-validation over *df* and prints, in order: test
    recall, train recall, test precision, train precision.
    """
    feature_cols = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline',
                    'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'regex_open',
                    'regex_close', 'trailing_financial', 'trailing_7A', 'leading_words', 'leading_see',
                    'leading_text', 'leading_double_newline', 'is_uppercase', 'trailing_omission', 'leading_with',
                    'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']
    target_col = 'y_open'
    features = df[feature_cols]
    labels = df[target_col]
    forest = RandomForestClassifier(class_weight='balanced', n_estimators=1000)
    scores = cross_validate(forest, features, labels, cv=5,
                            scoring=('precision', 'recall'), return_train_score=True)
    for metric in ('test_recall', 'train_recall', 'test_precision', 'train_precision'):
        print(scores[metric])
def neural(df):
    """Fit a small dense Keras network on the MD&A-opening features.

    Standardizes the features, trains for 400 epochs with SGD on binary
    cross-entropy (precision/recall tracked), then prints the test and
    train confusion matrices at a 0.5 decision threshold.
    """
    from sklearn.preprocessing import StandardScaler
    feature_cols = ['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline',
                    'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'regex_open', 'trailing_financial', 'trailing_7A', 'leading_words', 'leading_see',
                    'leading_text', 'leading_double_newline', 'is_uppercase', 'trailing_omission', 'leading_with',
                    'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']
    target_col = 'y_open'
    X_train, X_test, y_train, y_test = train_test_split(
        df[feature_cols], df[target_col], test_size=0.25, random_state=699, shuffle=True)
    # Scale on the training split only, then apply to both splits.
    scaler = StandardScaler()
    scaler.fit(X_train)
    X_train = scaler.transform(X_train)
    X_test = scaler.transform(X_test)
    net = keras.Sequential([
        layers.Dense(len(feature_cols), activation="linear", input_shape=(len(feature_cols),)),
        layers.Dense(100, activation='linear'),
        layers.Dense(32, activation='linear'),
        layers.Dense(1, activation="sigmoid"),
    ])
    net.compile(
        loss=keras.losses.BinaryCrossentropy(),
        optimizer=keras.optimizers.SGD(lr=0.001),
        metrics=[keras.metrics.Precision(), keras.metrics.Recall()]
    )
    net.fit(X_train, y_train,
            epochs=400,
            verbose=True,
            validation_data=(X_test, y_test),
            batch_size=5)
    print(confusion_matrix(y_test, np.where(net.predict(X_test) > 0.5, 1, 0)))
    print(confusion_matrix(y_train, np.where(net.predict(X_train) > 0.5, 1, 0)))
def open_training():
    """Train, pickle, and inspect the annual-report MD&A-opening classifier.

    Loads the labelled feature table, fits a balanced random forest,
    serializes it to 'opening_random_forest.pkl', plots train/test
    confusion matrices, and shows a SHAP feature-importance summary.
    """
    import pickle

    frame = pd.read_csv('Text Extraction\\data.csv', index_col=0)
    frame = frame[frame['y_open'] != 'None']
    frame['true_class'] = np.where(frame['input_location'] == frame['true_location_open'], 1, 0)
    # Regex-baseline confusion matrix on a train split (kept for parity with
    # the original flow; the result is not used further).
    base_X_train, _, base_y_train, _ = train_test_split(
        frame[list(frame.columns)], frame['y_open'],
        test_size=0.25, random_state=699, shuffle=True)
    cm = confusion_matrix(y_true=base_y_train, y_pred=base_X_train['regex_open'], normalize='true')
    features = frame[['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_newline', 'leading_newline',
                      'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial', 'trailing_7A', 'regex_open',
                      'leading_see', 'leading_text', 'leading_double_newline', 'is_uppercase',
                      'trailing_omission', 'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']]
    labels = frame['y_open'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.25, random_state=699, shuffle=True)
    model = RandomForestClassifier(class_weight='balanced', max_depth=6, random_state=699, n_estimators=1000)
    model.fit(X_train, y_train)
    with open('opening_random_forest.pkl', 'wb') as f:
        pickle.dump(model, f)
    for split_X, split_y in ((X_train, y_train), (X_test, y_test)):
        plot_confusion_matrix(model, split_X, split_y, cmap='Blues')
    explainer = shap.TreeExplainer(model)
    shap_values = explainer.shap_values(features)
    shap.summary_plot(shap_values, features)
def open_train_quarterly():
    """Train the quarterly (10-Q) MD&A-opening classifier and compare it
    against the regex baseline.

    Plots normalized confusion matrices for the regex baseline (seaborn
    heatmaps, train then test) and for a balanced random forest.
    """
    frame = pd.read_csv('quarterlyClassification.csv', index_col=0)
    frame = frame[frame['y_open'] != 'None']
    frame['true_class'] = np.where(frame['input_location'] == frame['true_location_open'], 1, 0)
    base_X_train, base_X_test, base_y_train, base_y_test = train_test_split(
        frame[list(frame.columns)], frame['y_open'],
        test_size=0.25, random_state=699, shuffle=True)
    # Regex-baseline performance, train split then test split.
    cm = confusion_matrix(y_true=base_y_train, y_pred=base_X_train['regex_open'], normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    cm = confusion_matrix(y_true=base_y_test, y_pred=base_X_test['regex_open'], normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues')
    plt.show()
    features = frame[['position', 'trailing_management', 'trailing_period', 'trailing_2', 'trailing_newline', 'leading_newline',
                      'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial', 'regex_open',
                      'leading_see', 'leading_text', 'leading_double_newline', 'is_uppercase',
                      'trailing_omission', 'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']]
    labels = frame['y_open'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.25, random_state=699, shuffle=True)
    model = RandomForestClassifier(class_weight='balanced', max_depth=6, random_state=699, n_estimators=1000)
    model.fit(X_train, y_train)
    for split_X, split_y in ((X_train, y_train), (X_test, y_test)):
        plot_confusion_matrix(model, split_X, split_y, normalize='true', cmap='Blues')
        plt.show()
def close_train_quarterly():
    """Train and inspect the quarterly (10-Q) MD&A-closing classifier.

    Shows regex-baseline heatmaps, a balanced random forest's normalized
    train/test confusion matrices, a SHAP summary, and ROC curves
    (test split first, then train).
    """
    frame = pd.read_csv('quarterlyClassification.csv', index_col=0)
    frame = frame[frame['y_close'] != 'None']
    # NOTE(review): compares against true_location_open, not *_close --
    # preserved as-is; confirm whether that is intentional.
    frame['true_class'] = np.where(frame['input_location'] == frame['true_location_open'], 1, 0)
    base_X_train, base_X_test, base_y_train, base_y_test = train_test_split(
        frame[list(frame.columns)], frame['y_close'],
        test_size=0.25, random_state=699, shuffle=True)
    # Regex-baseline performance, train split then test split.
    cm = confusion_matrix(y_true=base_y_train, y_pred=base_X_train['regex_close'], normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues', fmt='g')
    plt.show()
    cm = confusion_matrix(y_true=base_y_test, y_pred=base_X_test['regex_close'], normalize='true')
    f = sns.heatmap(cm, annot=True, cmap='Blues')
    plt.show()
    features = frame[['position', 'trailing_period', 'trailing_3', 'leading_newline', 'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial', 'regex_close',
                      'trailing_quantitative']]
    labels = frame['y_close'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.25, random_state=699, shuffle=True)
    model = RandomForestClassifier(class_weight='balanced', max_depth=5, random_state=699, n_estimators=1000)
    model.fit(X_train, y_train)
    for split_X, split_y in ((X_train, y_train), (X_test, y_test)):
        plot_confusion_matrix(model, split_X, split_y, normalize='true', cmap='Blues')
        plt.show()
    explainer = shap.TreeExplainer(model)
    shap_values = explainer.shap_values(features)
    shap.summary_plot(shap_values, features)
    plot_roc_curve(model, X_test, y_test)
    plt.show()
    plot_roc_curve(model, X_train, y_train)
    plt.show()
def close_training():
    """Fit the annual-report MD&A-closing classifier and show its SHAP
    feature-importance summary (no confusion-matrix plots in this variant).
    """
    frame = pd.read_csv('data2.csv', index_col=0)
    frame = frame[frame['y_close'] != 'None']
    # Initial split over all columns (kept for parity with the original
    # flow; only the feature split below feeds the model).
    train_test_split(frame[list(frame.columns)], frame['y_close'],
                     test_size=0.25, random_state=699, shuffle=True)
    features = frame[['position', 'trailing_management', 'trailing_period', 'trailing_7', 'trailing_8', 'trailing_newline', 'leading_newline',
                      'total_size', 'leading_tab', 'leading_spaces', 'trailing_financial', 'trailing_7A', 'regex_close',
                      'leading_see', 'leading_text', 'leading_double_newline', 'is_uppercase',
                      'trailing_omission', 'leading_table_of_contents', 'leading_newline_count', 'trailing_analysis']]
    labels = frame['y_close'].astype(int)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.25, random_state=699, shuffle=True)
    model = RandomForestClassifier(class_weight='balanced', max_depth=5, random_state=699, n_estimators=1000)
    model.fit(X_train, y_train)
    explainer = shap.TreeExplainer(model)
    shap_values = explainer.shap_values(features)
    shap.summary_plot(shap_values, features)
if __name__ == "__main__":
    # Entry point: only the quarterly closing-boundary training is active;
    # the commented calls below are kept for ad-hoc experimentation.
    # close_training()
    # test()
    # import pickle
    # with open('opening_random_forest.pkl', 'rb') as f:
    #     clf = pickle.load(f)
    #
    # clf.predict(X_train)
    # get_supervised('QuarterlySupervised.csv')
    # test_quarterly()
    close_train_quarterly()
    # open_train_quarterly()
"Markdown",
"Python"
] | 26 | Python | jptree/SEC-Analytics | eddc346428d769a4c029b55795935748a182abdc | 0339acc439f5c05880423e531144906f82ec6e35 |
refs/heads/master | <repo_name>navyxie/llpayauth<file_sep>/README.md
# 连连实名认证 SDK
**注意:支持连连新版和旧版的实名认证**
**新商户的商户号是以W开头**
## API
`doAuth`
```js
var llpayauth = require('llpayauth');
var intiData = {
key:"",//MD5key
sign_type:"",//加密方式
yt_pub_key:'',//连连公钥
trader_pri_key:''//商户私钥
}
var llpayauthInstance = new llpayauth(intiData);//实例化
llpayauthInstance.doAuth({
"merch_id": "2015**********2",//商户ID,连连后台查看
"outorder_no":"123456789navytest_auth_new",//商户订单id
"name_card":"**",//用户姓名
"id_card":"**"//用户身份证
},function(err,data){
if(!err){
data => {
"sign":"1ff4bf16dd6e5f4a919cfe8f5fa1a1b2",
"ret_code":"0000",
"ret_msg":"[0000] 请求成功",
"merch_id ":"W20150508000000001",
"product_id":"B10002",
"sign_type":"MD5",
"order_no ":"20150527095112",
"outorder_no ":"12312312312",
"order_fee":"2",
"id_card ":"330184199090909900",
"name_card ":"张三",
"result":"1"//返回结果:1-认证一致,2-认证不一致,3-无结果(在公安数据库中查询不到此条数据)
}
console.log(data);
}
});
```
<file_sep>/lib/config.js
// LianLian Pay real-name-authentication SDK configuration.
// All values except info_order are merchant credentials / protocol settings.
module.exports = {
    // ---- Merchant-specific settings: replace these with your own values ----
    oid_partner:"201408071000001546",// Merchant number opened on the LianLian Pay platform; 18 digits, e.g. 201306081000001016
    busi_type:'P01',// Business type code
    notify_type:'SYNC',// Notification mode (synchronous)
    key:"201408071000001546_test_20140815",// MD5 security verification key (digits and letters); keep this secret
    // ---- End of merchant-specific settings ----
    api_version:"1.0",// API version
    sign_type:"RSA",// Signature method: MD5 or RSA
    input_charset:"utf-8",// Character encoding; gbk and utf-8 are supported
    yt_pub_key:"<KEY>",// YinTong (LianLian) public key -- "<KEY>" is a redaction placeholder, fill in the real key
    trader_pri_key:"<KEY>",// Merchant private key -- "<KEY>" is a redaction placeholder, fill in the real key
    notify_url:"",// URL that receives asynchronous notifications
    info_order:"考拉理财,开启懒人理财生活。",// Order description shown to the user (runtime string)
    product_id:'B10002',// Product identifier of the real-name authentication service
}
var llpayauth = require('../lib/llpayauth');
var config = require('../lib/config');
var should = require('should');
var muk = require('muk');
var request = require('request');
// Unit tests for llpayauth#doAuth(). request.post is mocked with muk, so the
// assertions run against the exact JSON body the SDK would send plus a canned
// LianLian response -- no network traffic is made.
describe('#doAuth()',function(){
    // New-style authentication (new merchant IDs begin with "W").
    describe('should ok',function(){
        var llpayauthInstance = new llpayauth({
            key:"test",
            sign_type:"RSA",
            yt_pub_key:config.yt_pub_key,
            trader_pri_key:config.trader_pri_key
        });
        before(function(){
            muk(request,'post',function(json, callback){
                describe('#json.url should be "https://yintong.com.cn/tradeauthapi/v1/auth/get_auth"#',function(){
                    var body = JSON.parse(json.body);
                    // console.log(body);
                    // The new-style endpoint must be "https://yintong.com.cn/tradeauthapi/v1/auth/get_auth".
                    it('url should ok',function(){
                        json.url.should.be.equal("https://yintong.com.cn/tradeauthapi/v1/auth/get_auth");
                    });
                    it('sign_type should ok',function(){
                        body.sign_type.should.be.equal('RSA');
                    });
                    it('sign should ok',function(){
                        // NOTE(review): the expected-signature literal below was
                        // truncated by secret redaction ("<KEY"); restore the real
                        // value before running this suite.
                        body.sign.should.be.equal("<KEY>
                    })
                });
                // Canned success response returned to doAuth().
                var res = {
                    "sign":"1ff4bf16dd6e5f4a919cfe8f5fa1a1b2",
                    "ret_code":"0000",
                    "ret_msg":"[0000] 请求成功",
                    "merch_id":"W20150508000000001",
                    "product_id":"B10002",
                    "sign_type":"RSA",
                    "order_no":"20150527095112",
                    "outorder_no":"12312312312",
                    "order_fee":"2",
                    "id_card":"330184199090909900",
                    "name_card":"张三",
                    "result":"1"
                };
                process.nextTick(function(){
                    callback(null,{statusCode:200},JSON.stringify(res));
                })
            })
        });
        after(function(){
            muk.restore();
        });
        // New version: new merchant IDs start with the letter "W".
        it('doAuth new version should ok',function(done){
            llpayauthInstance.doAuth({
                "merch_id": "W2015",
                "outorder_no":"123456789navytest_auth_new",
                "name_card":"谢**",
                "id_card":"440****",
                sign_type:"RSA"
            },function(err,data){
                data.should.have.properties(['id_card','merch_id','name_card','order_fee','order_no','outorder_no','product_id','result','ret_code','ret_msg','sign','sign_type']);
                done(err);
            })
        })
    });
    // Old-style authentication (legacy merchant IDs, MD5 signing).
    describe('should ok',function(){
        var llpayauthInstance = new llpayauth({
            key:"test2",
            sign_type:"MD5",
            yt_pub_key:config.yt_pub_key,
            trader_pri_key:config.trader_pri_key
        });
        before(function(){
            muk(request,'post',function(json, callback){
                var body = JSON.parse(json.body);
                describe('#json.url should be "https://yintong.com.cn/tradeauthapi/auth.htm"#',function(){
                    // The old-style endpoint must be "https://yintong.com.cn/tradeauthapi/auth.htm".
                    it('should ok',function(){
                        json.url.should.be.equal("https://yintong.com.cn/tradeauthapi/auth.htm");
                    });
                    it('sign_type should ok',function(){
                        body.sign_type.should.be.equal('MD5');
                    });
                    it('sign should ok',function(){
                        body.sign.should.be.equal("f8bd52c31f5b9ead575d3e72961f3d88");
                    })
                });
                // Canned success response returned to doAuth().
                var res = {
                    "sign":"1ff4bf16dd6e5f4a919cfe8f5fa1a1b2",
                    "ret_code":"0000",
                    "ret_msg":"[0000] 请求成功",
                    "merch_id":"W20150508000000001",
                    "product_id":"B10002",
                    "sign_type":"MD5",
                    "order_no":"20150527095112",
                    "outorder_no":"12312312312",
                    "order_fee":"2",
                    "id_card":"330184199090909900",
                    "name_card":"张三",
                    "result":"1"
                };
                process.nextTick(function(){
                    callback(null,{statusCode:200},JSON.stringify(res));
                })
            })
        });
        after(function(){
            muk.restore();
        });
        // Old version: legacy request fields (oid_partner / no_order / ...).
        it('doAuth new version should ok',function(done){
            llpayauthInstance.doAuth({
                "oid_partner": "2015",
                "no_order":"123456789navytest_auth",
                "dt_order":"20150605173032",
                "name_user":"李**",
                "id_no":"442****"
            },false,function(err,data){
                data.should.have.properties(['id_card','merch_id','name_card','order_fee','order_no','outorder_no','product_id','result','ret_code','ret_msg','sign','sign_type']);
                done(err);
            })
        })
    });
})
"Markdown",
"JavaScript"
] | 3 | Markdown | navyxie/llpayauth | a0f452751dadaa3573055daa526cd61e9affb624 | 3df25a73131df1e62bf6995dd36e2970b3469cf9 |
refs/heads/master | <file_sep>package com.example.paul.socialsportconnect;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Typeface;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Paul on 30/03/2016.
*/
/**
 * Sign-up screen. Collects the registration fields (pseudo, password, name,
 * birth date, e-mail, mobile) and POSTs them to
 * http://socialsportconnect.fr/insert-db.php via an AsyncTask, then always
 * shows a success toast and navigates back to the sign-in screen.
 *
 * NOTE(review): built on the legacy Apache HttpClient stack
 * (DefaultHttpClient), which is deprecated/removed on modern Android.
 * NOTE(review): both catch blocks in doInBackground swallow their
 * exceptions, so network failures are still reported as a success.
 */
public class FormSignOn extends Activity {

    // Form inputs, bound to the layout in onCreate().
    private EditText editTextPseudo;
    private EditText editTextPassword;
    private EditText editTextLastName;
    private EditText editTextFirstName;
    private EditText editTextBirthDate;
    private EditText editTextEMail;
    private EditText editTextMobile;
    // Root view of the inflated sign-up layout.
    private RelativeLayout layout;

    /**
     * Inflates the sign-up layout, binds every field and label, and applies
     * the AvenirLight typeface to all of them.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        layout = (RelativeLayout) RelativeLayout.inflate(this, R.layout.activity_formsignon, null);
        /*
        try {
            JSONArray jsonArray = new JSONArray(readApi());
            for (int i = 0; i < jsonArray.length(); i++) {
                JSONObject firstObj = jsonArray.getJSONObject(i);
                String idLivre = firstObj.getString("idLivre");
                String titreLivre = firstObj.getString("titreLivre");
                String auteurLivre = firstObj.getString("auteurLivre");
                System.out.println("je suis la");
                System.out.println(idLivre);
                System.out.println(titreLivre);
                System.out.println(auteurLivre);
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
        */
        editTextPseudo = (EditText) layout.findViewById(R.id.editText);
        editTextPassword = (EditText) layout.findViewById(R.id.editText2);
        editTextLastName = (EditText) layout.findViewById(R.id.editText3);
        editTextFirstName = (EditText) layout.findViewById(R.id.editText4);
        editTextBirthDate = (EditText) layout.findViewById(R.id.editText5);
        editTextEMail = (EditText) layout.findViewById(R.id.editText6);
        editTextMobile = (EditText) layout.findViewById(R.id.editText7);
        TextView textView15 = (TextView) layout.findViewById(R.id.textView15);
        TextView textView16 = (TextView) layout.findViewById(R.id.textView16);
        TextView textView17 = (TextView) layout.findViewById(R.id.textView17);
        TextView textView18 = (TextView) layout.findViewById(R.id.textView18);
        TextView textView19 = (TextView) layout.findViewById(R.id.textView19);
        TextView textView20 = (TextView) layout.findViewById(R.id.textView20);
        TextView textView21 = (TextView) layout.findViewById(R.id.textView21);
        TextView textView22 = (TextView) layout.findViewById(R.id.textView22);
        EditText editText = (EditText) layout.findViewById(R.id.editText);
        EditText editText2 = (EditText) layout.findViewById(R.id.editText2);
        EditText editText3 = (EditText) layout.findViewById(R.id.editText3);
        EditText editText4 = (EditText) layout.findViewById(R.id.editText4);
        EditText editText5 = (EditText) layout.findViewById(R.id.editText5);
        EditText editText6 = (EditText) layout.findViewById(R.id.editText6);
        EditText editText7 = (EditText) layout.findViewById(R.id.editText7);
        // Apply the app's custom font to every label and input.
        Typeface faceFormSignOn = Typeface.createFromAsset(getAssets(), "fonts/AvenirLight.ttf");
        textView15.setTypeface(faceFormSignOn);
        textView16.setTypeface(faceFormSignOn);
        textView17.setTypeface(faceFormSignOn);
        textView18.setTypeface(faceFormSignOn);
        textView19.setTypeface(faceFormSignOn);
        textView20.setTypeface(faceFormSignOn);
        textView21.setTypeface(faceFormSignOn);
        textView22.setTypeface(faceFormSignOn);
        editText.setTypeface(faceFormSignOn);
        editText2.setTypeface(faceFormSignOn);
        editText3.setTypeface(faceFormSignOn);
        editText4.setTypeface(faceFormSignOn);
        editText5.setTypeface(faceFormSignOn);
        editText6.setTypeface(faceFormSignOn);
        editText7.setTypeface(faceFormSignOn);
        setContentView(layout);
    }

    /**
     * Click handler: reads the current form values and submits them.
     *
     * @param view the clicked view (unused)
     */
    public void insert(View view){
        String pseudo = editTextPseudo.getText().toString();
        String password = editTextPassword.getText().toString();
        String lastName = editTextLastName.getText().toString();
        String firstName = editTextFirstName.getText().toString();
        String birthDate = editTextBirthDate.getText().toString();
        String eMail = editTextEMail.getText().toString();
        String mobile = editTextMobile.getText().toString();
        insertToDatabase(pseudo, password, lastName, firstName, birthDate, eMail, mobile);
    }

    /**
     * Posts the registration data to the remote PHP endpoint on a background
     * thread, then toasts the result and returns to the sign-in screen.
     *
     * NOTE(review): doInBackground ignores the values passed through
     * execute() (only params[0]/params[1] are read, into unused locals) and
     * re-reads the EditText widgets from the worker thread instead --
     * UI widgets should only be touched on the main thread.
     */
    private void insertToDatabase(String pseudo, String password, String lastName, String firstName, String birthDate, String eMail, String mobile) {
        class SendPostReqAsyncTask extends AsyncTask<String, Void, String> {
            @Override
            protected String doInBackground(String... params) {
                String paramUsername = params[0];
                String paramAddress = params[1];
                String pseudo = editTextPseudo.getText().toString();
                String password = editTextPassword.getText().toString();
                String lastName = editTextLastName.getText().toString();
                String firstName = editTextFirstName.getText().toString();
                String birthDate = editTextBirthDate.getText().toString();
                String eMail = editTextEMail.getText().toString();
                String mobile = editTextMobile.getText().toString();
                List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
                nameValuePairs.add(new BasicNameValuePair("pseudo", pseudo));
                // NOTE(review): "<PASSWORD>" below is a secret-redaction
                // artifact; the original code presumably passed the local
                // `password` variable.
                nameValuePairs.add(new BasicNameValuePair("password", <PASSWORD>));
                nameValuePairs.add(new BasicNameValuePair("lastName", lastName));
                nameValuePairs.add(new BasicNameValuePair("firstName", firstName));
                nameValuePairs.add(new BasicNameValuePair("birthDate", birthDate));
                nameValuePairs.add(new BasicNameValuePair("eMail", eMail));
                nameValuePairs.add(new BasicNameValuePair("mobile", mobile));
                try {
                    HttpClient httpClient = new DefaultHttpClient();
                    HttpPost httpPost = new HttpPost(
                            "http://socialsportconnect.fr/insert-db.php");
                    httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
                    HttpResponse response = httpClient.execute(httpPost);
                    HttpEntity entity = response.getEntity();
                } catch (ClientProtocolException e) {
                    // NOTE(review): silently swallowed; failure still reports success.
                } catch (IOException e) {
                    // NOTE(review): silently swallowed; failure still reports success.
                }
                return "Inscription réussie";
            }

            @Override
            protected void onPostExecute(String result) {
                super.onPostExecute(result);
                // Always shown, regardless of whether the POST succeeded.
                Toast.makeText(getApplicationContext(), result, Toast.LENGTH_LONG).show();
                changeFormSignOnToSignInActivity(layout.findViewById(android.R.id.content));
            }
        }
        SendPostReqAsyncTask sendPostReqAsyncTask = new SendPostReqAsyncTask();
        sendPostReqAsyncTask.execute(pseudo, password, lastName, firstName, birthDate, eMail, mobile);
    }

    /**
     * Navigates to the sign-in screen with a left/right slide animation.
     *
     * @param v the originating view (unused)
     */
    public void changeFormSignOnToSignInActivity(View v) {
        Intent intent = new Intent(this, SignInActivity.class);
        startActivity(intent);
        overridePendingTransition(R.anim.slide_in_left, R.anim.slide_out_right);
    }
}
/**
* Created by Paul on 03/05/2016.
*/
/**
 * Immutable value object describing a pick-up match: where it takes place
 * (city and stadium), how often it recurs, and a free-text description.
 */
public class Match {

    private final String cityCreatematch;
    private final String frequencyCreatematch;
    private final String stadiumCreatematch;
    private final String descriptionCreatematch;

    /**
     * @param cityCreatematch        city where the match is played
     * @param frequencyCreatematch   how often the match recurs
     * @param stadiumCreatematch     stadium / pitch name
     * @param descriptionCreatematch free-text description of the match
     */
    public Match(String cityCreatematch, String frequencyCreatematch, String stadiumCreatematch, String descriptionCreatematch) {
        this.cityCreatematch = cityCreatematch;
        this.frequencyCreatematch = frequencyCreatematch;
        this.stadiumCreatematch = stadiumCreatematch;
        this.descriptionCreatematch = descriptionCreatematch;
    }

    /** @return the city where the match is played */
    public String getCityCreatematch() {
        return cityCreatematch;
    }

    /** @return how often the match recurs */
    public String getFrequencyCreatematch() {
        return frequencyCreatematch;
    }

    /** @return the stadium / pitch name */
    public String getStadiumCreatematch() {
        return stadiumCreatematch;
    }

    /** @return the free-text description of the match */
    public String getDescriptionCreatematch() {
        return descriptionCreatematch;
    }
}
<file_sep>package com.example.paul.socialsportconnect;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Typeface;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.w3c.dom.Text;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Paul on 20/04/2016.
*/
/**
 * Form screen used to create a new match: four text inputs (city, frequency,
 * stadium, description) that are POSTed to the backend off the UI thread,
 * after which the user is sent back to the match list.
 */
public class FormCreateMatch extends Activity {

    // Input fields of the form, bound once in onCreate().
    private EditText editTextCity;
    private EditText editTextFrequency;
    private EditText editTextStadium;
    private EditText editTextDescription;
    private RelativeLayout layout;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        layout = (RelativeLayout) RelativeLayout.inflate(this, R.layout.activity_formcreatematch, null);

        TextView form_create_title = (TextView) layout.findViewById(R.id.form_create_title);
        TextView form_create_city = (TextView) layout.findViewById(R.id.form_create_city);
        TextView form_create_frequence = (TextView) layout.findViewById(R.id.form_create_frequence);
        TextView form_create_stade = (TextView) layout.findViewById(R.id.form_create_stade);
        TextView form_create_description = (TextView) layout.findViewById(R.id.form_create_description);
        Button form_create_button = (Button) layout.findViewById(R.id.form_create_button);

        // Bind each input once and reuse the fields below; the original code
        // looked the same widgets up twice.
        editTextCity = (EditText) layout.findViewById(R.id.editText10);
        editTextFrequency = (EditText) layout.findViewById(R.id.editText11);
        editTextStadium = (EditText) layout.findViewById(R.id.editText12);
        editTextDescription = (EditText) layout.findViewById(R.id.editText13);

        // Apply the shared Avenir font to every widget of the form.
        Typeface faceFormCreateMatch = Typeface.createFromAsset(getAssets(), "fonts/AvenirLight.ttf");
        form_create_title.setTypeface(faceFormCreateMatch);
        form_create_city.setTypeface(faceFormCreateMatch);
        form_create_frequence.setTypeface(faceFormCreateMatch);
        form_create_stade.setTypeface(faceFormCreateMatch);
        form_create_description.setTypeface(faceFormCreateMatch);
        form_create_button.setTypeface(faceFormCreateMatch);
        editTextCity.setTypeface(faceFormCreateMatch);
        editTextFrequency.setTypeface(faceFormCreateMatch);
        editTextStadium.setTypeface(faceFormCreateMatch);
        editTextDescription.setTypeface(faceFormCreateMatch);

        setContentView(layout);
    }

    /** onClick handler of the submit button (wired in the layout XML). */
    public void insert(View view) {
        String city = editTextCity.getText().toString();
        String frequency = editTextFrequency.getText().toString();
        String stadium = editTextStadium.getText().toString();
        String description = editTextDescription.getText().toString();
        insertToDatabase(city, frequency, stadium, description);
    }

    /**
     * POSTs the new match to the backend on a worker thread, then navigates
     * back to the match list and shows a confirmation toast.
     */
    private void insertToDatabase(String city, String frequency, String stadium, String description) {
        class SendPostReqAsyncTask extends AsyncTask<String, Void, String> {
            @Override
            protected String doInBackground(String... params) {
                // Bug fix: use the values handed to execute() instead of reading
                // the EditTexts again here. doInBackground runs on a worker
                // thread, and UI widgets must not be touched off the UI thread;
                // the original also left two unused copy-pasted locals
                // (paramUsername / paramAddress) from the sign-up task.
                String city = params[0];
                String frequency = params[1];
                String stadium = params[2];
                String description = params[3];

                List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
                nameValuePairs.add(new BasicNameValuePair("city", city));
                nameValuePairs.add(new BasicNameValuePair("frequency", frequency));
                nameValuePairs.add(new BasicNameValuePair("stadium", stadium));
                nameValuePairs.add(new BasicNameValuePair("description", description));
                try {
                    HttpClient httpClient = new DefaultHttpClient();
                    HttpPost httpPost = new HttpPost(
                            "http://socialsportconnect.fr/insert-db-match.php");
                    httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
                    HttpResponse response = httpClient.execute(httpPost);
                    HttpEntity entity = response.getEntity();
                } catch (ClientProtocolException e) {
                    // Best effort: a failed insert is silently ignored, as before.
                } catch (IOException e) {
                }
                return "Match crée";
            }

            @Override
            protected void onPostExecute(String result) {
                super.onPostExecute(result);
                changeFormCreateMatchToMatch(layout.findViewById(android.R.id.content));
                Toast.makeText(getApplicationContext(), result, Toast.LENGTH_LONG).show();
            }
        }
        SendPostReqAsyncTask sendPostReqAsyncTask = new SendPostReqAsyncTask();
        sendPostReqAsyncTask.execute(city, frequency, stadium, description);
    }

    /** Navigates to the match list with a slide transition. */
    public void changeFormCreateMatchToMatch(View v) {
        Intent intent = new Intent(this, MatchActivity.class);
        startActivity(intent);
        overridePendingTransition(R.anim.slide_in_left, R.anim.slide_out_right);
    }
}
<file_sep>package com.example.paul.socialsportconnect;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
/**
* Created by Paul on 20/04/2016.
*/
/**
 * Screen that downloads the list of matches from the backend and shows them
 * in a ListView. Tapping a row forwards that row's fields to
 * JoinMatchActivity as intent extras.
 */
public class MatchActivity extends Activity {
    RelativeLayout layout = null;
    private TextView text = null;       // unused debugging field (see commented block below)
    String[] prenoms = new String[] {}; // unused leftover field

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        layout = (RelativeLayout) RelativeLayout.inflate(this, R.layout.activity_match, null);
        /*
        text = (TextView) layout.findViewById(R.id.textView25);
        text.setText(readApi());
        text.setPadding(100, 100, 20, 150);
        text.setTextColor(Color.parseColor("#000000"));
        */
        // Apply the shared Avenir font to the static labels.
        Typeface faceCreateMatch = Typeface.createFromAsset(getAssets(), "fonts/AvenirLight.ttf");
        TextView createMatch = (TextView) layout.findViewById(R.id.textView24);
        createMatch.setTypeface(faceCreateMatch);
        Typeface faceSearchMatch = Typeface.createFromAsset(getAssets(), "fonts/AvenirLight.ttf");
        TextView searchMatch = (TextView) layout.findViewById(R.id.textView26);
        TextView titleMatch = (TextView) layout.findViewById(R.id.titleMatch);
        searchMatch.setTypeface(faceSearchMatch);
        titleMatch.setTypeface(faceCreateMatch);

        // Parse the JSON array returned by the backend into Match objects.
        ArrayList<Match> matchs = new ArrayList<Match>();
        try {
            JSONArray jsonArray = new JSONArray(readApi());
            for (int i = 0; i < jsonArray.length(); i++) {
                JSONObject firstObj = jsonArray.getJSONObject(i);
                String cityCreatematch = firstObj.getString("city_creatematch");
                String frequencyCreatematch = firstObj.getString("frequency_creatematch");
                String stadiumCreatematch = firstObj.getString("stadium_creatematch");
                String descriptionCreatematch = firstObj.getString("description_creatematch");
                System.out.println("je suis la");
                System.out.println(cityCreatematch);
                System.out.println(frequencyCreatematch);
                System.out.println(stadiumCreatematch);
                System.out.println(descriptionCreatematch);
                matchs.add(new Match(cityCreatematch, frequencyCreatematch, stadiumCreatematch, descriptionCreatematch));
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }

        // Show the matches; on tap, forward the row's text fields to
        // JoinMatchActivity via intent extras.
        final ListView mainListView = (ListView) layout.findViewById(R.id.listView);
        MatchAdaptator adapter = new MatchAdaptator(MatchActivity.this, matchs);
        mainListView.setAdapter(adapter);
        mainListView.setClickable(true);
        mainListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> arg0, View arg1, int position, long arg3) {
                final View v = arg1 ; // Save selected view in final variable**
                TextView city = (TextView) v.findViewById(R.id.city);
                String cityText = (String) city.getText();
                TextView stadium = (TextView) v.findViewById(R.id.stadium);
                String stadiumText = (String) stadium.getText();
                TextView frequency = (TextView) v.findViewById(R.id.frequency);
                String frequencyText = (String) frequency.getText();
                TextView description = (TextView) v.findViewById(R.id.description);
                String descriptionText = (String) description.getText();
                Intent intent = new Intent(MatchActivity.this, JoinMatchActivity.class);
                intent.putExtra("city", cityText);
                intent.putExtra("stadium", stadiumText);
                intent.putExtra("frequency", frequencyText);
                intent.putExtra("description", descriptionText);
                startActivity(intent);
                overridePendingTransition(R.anim.animation_enter, R.anim.animation_leave);
            }
        });
        setContentView(layout);
    }

    // onClick handler (wired in the layout XML): opens the match-creation form.
    public void changeMatchToFormCreateMatch(View v) {
        Intent intent = new Intent(this, FormCreateMatch.class);
        startActivity(intent);
        overridePendingTransition(R.anim.animation_enter, R.anim.animation_leave);
    }

    // Fetches the raw JSON match list from the backend; returns null on error.
    // NOTE(review): this is called from onCreate, i.e. on the UI thread — on
    // modern Android this raises NetworkOnMainThreadException; confirm the
    // target SDK / thread policy.
    public String readApi() {
        InputStream in = null;
        String result = null;
        try {
            URL url = new URL("http://socialsportconnect.fr/matchlist.php");
            HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
            in = new BufferedInputStream(urlConnection.getInputStream());
            result = convert(in);
        } catch (Exception e) {
            Log.e("error", "readApi" + e);
        } finally {
            try {
                in.close();
            } catch (Exception e) {
            }
        }
        return result;
    }

    // Reads an InputStream to exhaustion, returning its contents with each
    // line terminated by '\n'. Closes the stream when done.
    public String convert(InputStream is) {
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));
        StringBuilder sb = new StringBuilder();
        String line = null;
        try {
            while ( (line = reader.readLine()) != null) {
                sb.append(line + "\n");
            }
        } catch (Exception e) {
        } finally {
            try {
                is.close();
            } catch (Exception e) {
            }
        }
        return sb.toString();
    }
}
| 386b538e6fde074d565ea5bee743852d36968784 | [
"Java"
] | 4 | Java | PolSpock/SSC | 2f693891e70e7d7ab38a62677feaeeee7eadfd36 | b6d1339113c7df672eb01c24e3b73f46827e835e |
refs/heads/master | <file_sep>name 'myrails'
maintainer 'TSH'
license 'Apache 2.0'
description 'Installs and configures a basic CloudWatch Logs'
version '0.0.1'
| abdf60ebcbf3a33fc6c95733d28394cca2f164de | [
"Ruby"
] | 1 | Ruby | tomasz-janiczek/opsworks_logging | e50ea05123211d00538607b839072c646322b119 | 05750209bf21ccd180076a5a2c33ba6cffdd3cd1 |
refs/heads/master | <file_sep>Transfer is to move what clone is to copy
=========================================
Note: This crate, as well as [`stackpin`](https://github.com/dureuill/stackpin/) is very much a work in progress, and is published in the hope that it will be of interest for further work.
The `Transfer` trait executes user code to move a value from one unmovable instance of a struct into another.
In this way, it is similar to the `Clone` trait, that allows to execute user code to clone a value that is not copiable.
The `Transfer` trait is also comparable to the move constructor of C++.
Hold on, what is an unmovable struct?
-------------------------------------
Rust does not natively expose the concept of "unmovable types". However, thanks to [`Pin`](std::pin::Pin) and `unsafe`, it is possible to express this concept in the type system.
`Transfer` leverages the [`stackpin`](https://github.com/dureuill/stackpin/) crate (by the same author) to build type safe abstractions for Unmovable types.
Examples
--------
* The unit tests for `Transfer` demonstrate a `SecretU64` type, that attempt to erase itself securely when it gets out of scope.
* An example for `Transfer` is `DynRef`, a type of reference that uses an external `Lifetime` struct to represent the lifetime of `DynRef`.
<file_sep>use stackpin::{stack_let, FromUnpinned, PinStack, Unpinned};
use std::cell::Cell;
use std::marker::{PhantomData, PhantomPinned};
use transfer::{transfer, Tr, Transfer};
/// Cell holding an optional raw pointer to a `T`. The pointer is set and
/// cleared by a matching pinned `Lifetime`, so it is valid exactly while
/// that `Lifetime` is alive.
pub struct DynRef<T: ?Sized>(Cell<Option<*const T>>);

impl<T: ?Sized> DynRef<T> {
    /// Creates an empty `DynRef` that points at nothing.
    pub fn new() -> Self {
        Self(Cell::new(None))
    }

    /// # Safety
    /// Caller must ensure the stored pointer (if any) is still valid, i.e.
    /// the `Lifetime` guarding it has not been dropped.
    unsafe fn get(&self) -> Option<&T> {
        self.0.get().map(|ptr| &*ptr)
    }

    /// Applies `f` to the referenced value, if the `DynRef` is currently set.
    pub fn map<U, F: Fn(&T) -> U>(&self, f: F) -> Option<U> {
        unsafe { self.get().map(f) }
    }

    /// `true` while a pinned `Lifetime` keeps this reference alive.
    pub fn is_some(&self) -> bool {
        self.0.get().is_some()
    }

    pub fn is_none(&self) -> bool {
        self.0.get().is_none()
    }

    /// Ties `self` to `br`: pinning the returned `Unpinned` on the stack
    /// (e.g. via `stack_let!`) yields a `Lifetime` that publishes the
    /// pointer and clears it again on drop.
    pub fn lock<'dr, 'br>(
        &'dr self,
        br: &'br T,
    ) -> Unpinned<(&'br T, &'dr Self), Lifetime<'dr, 'br, T>>
    where
        T: Sized, // FIXME: This shouldn't be required
    {
        Unpinned::new((br, self))
    }
}
/// Drop guard: clears the `DynRef`'s pointer when the owning `Lifetime`
/// is dropped (no-op when no `DynRef` is attached).
struct Dropper<'dr, T: ?Sized + 'dr>(Option<&'dr DynRef<T>>);

/// Unmovable token whose existence guarantees that the `DynRef` it was
/// created from points at a live `&'br T`.
pub struct Lifetime<'dr, 'br, T: ?Sized + 'dr + 'br> {
    dynref: Dropper<'dr, T>,
    _data: PhantomData<&'br T>,
    _pin: PhantomPinned, // makes the type !Unpin so it can only live pinned
}

impl<'dr, T: ?Sized + 'dr> Drop for Dropper<'dr, T> {
    fn drop(&mut self) {
        // Reset the pointer so the DynRef can never dangle.
        match self.0 {
            Some(DynRef(cell)) => cell.set(None),
            None => {}
        }
    }
}

impl<'dr, 'br, T> Lifetime<'dr, 'br, T> {
    /// A `Lifetime` that guards nothing; safe to drop or to use as a
    /// transfer destination.
    fn new_empty() -> Self {
        Self {
            dynref: Dropper(None),
            _data: PhantomData,
            _pin: PhantomPinned,
        }
    }
}
// Two-phase construction: the Lifetime is built empty, then `on_pin`
// publishes the pointer once the value sits pinned in its final location.
unsafe impl<'dr, 'br, T> FromUnpinned<(&'br T, &'dr DynRef<T>)> for Lifetime<'dr, 'br, T> {
    type PinData = (&'br T, &'dr DynRef<T>);

    unsafe fn from_unpinned(data: Self::PinData) -> (Self, Self::PinData) {
        (Self::new_empty(), data)
    }

    unsafe fn on_pin(&mut self, (val, dynref): Self::PinData) {
        let ptr = val as *const T;
        dynref.0.set(Some(ptr));
        self.dynref = Dropper(Some(dynref));
    }
}

unsafe impl<'dr, 'br, T> Transfer for Lifetime<'dr, 'br, T> {
    fn empty() -> Tr<Self> {
        Tr::from_empty(Self::new_empty())
    }

    // Moves guardianship of the DynRef from `src` to `dst`; `src` is left
    // empty so its eventual drop no longer touches the DynRef.
    unsafe fn transfer(src: &mut PinStack<'_, Self>, dst: *mut Self) {
        (*dst).dynref.0 = src.dynref.0;
        src.as_mut().get_unchecked_mut().dynref.0 = None
    }
}
fn main() {
    // Demonstrates that the DynRef is set exactly while a pinned Lifetime
    // is alive, and cleared again when that Lifetime's scope ends.
    let dr = DynRef::new();
    assert!(dr.is_none());
    {
        let s = String::from("foo");
        {
            stack_let!(_lifetime = dr.lock(&s));
            // you can throw the lifetime OK!
            std::mem::drop(_lifetime);
            assert!(dr.is_some());
        }
        assert!(dr.is_none());
    }
    println!("foo: {}", transfer_if_odd("foo"));
    println!("foobar: {}", transfer_if_odd("foobar"));
}

// Locks a DynRef onto a local String; when `val` has odd length, transfers
// the guarding Lifetime to an outer slot so the lock survives the inner
// scope. Returns whether the DynRef is still set after that scope closes.
fn transfer_if_odd(val: &'static str) -> bool {
    let dr = DynRef::new();
    {
        let mut lifetime = Lifetime::empty();
        let s = String::from(val);
        assert!(dr.is_none());
        {
            stack_let!(inner_lifetime = dr.lock(&s));
            assert!(dr.is_some());
            if val.len() % 2 == 1 {
                transfer(inner_lifetime, &mut lifetime);
            }
            assert!(dr.is_some());
        }
        dr.is_some()
    }
}
<file_sep>use stackpin::PinStack;
/// A type that can be *moved* between pinned locations by running user
/// code — to unmovable (pinned) types what `Clone` is to non-`Copy` types.
///
/// # Safety
///
/// * Implementers **must** write a valid `Self` to the `dst` argument of `transfer`
/// * Implementers are **not** allowed to panic in the `transfer` function
/// * Implementers **must** reset `pin` to a value that can be safely dropped without incidence on
///   the `dst` pointer that was written to in the `transfer` function
pub unsafe trait Transfer {
    /// Moves the pinned value out of `src` into the raw slot `dst`.
    ///
    /// # Safety
    ///
    /// * Callers of this function **must** call `reset` on the `src` argument right afterwards.
    /// * `dst` must point to a `Self` instance, that can possibly be uninitialized
    /// * `src` and `dest` **must** point to different instances.
    unsafe fn transfer(src: &mut PinStack<'_, Self>, dst: *mut Self)
    where
        Self: Sized;

    /// Produces an "empty" value suitable as a destination slot for `transfer`.
    fn empty() -> Tr<Self>;
}
/// Destination slot for a transfer: wraps an "empty" `T` that a pinned
/// value can later be moved into.
pub struct Tr<T: ?Sized>(T);

impl<T: Transfer> Tr<T> {
    /// Wraps an empty `T` (as produced by `Transfer::empty`).
    pub fn from_empty(empty: T) -> Self {
        Self(empty)
    }

    /// Raw pointer to the wrapped slot, handed to `Transfer::transfer`.
    fn slot(&mut self) -> *mut T {
        &mut self.0 as *mut T
    }
}

/// Moves the pinned value `src` into the slot `dest`, returning a new
/// pinned handle whose lifetime is borrowed from `dest`.
pub fn transfer<'old, 'new, T>(
    mut src: PinStack<'old, T>,
    dest: &'new mut Tr<T>,
) -> PinStack<'new, T>
where
    T: Transfer,
{
    use stackpin::StackPinned;
    use std::pin::Pin;
    unsafe {
        let slot = dest.slot();
        T::transfer(&mut src, slot);
        // Re-pinning is sound: the slot is borrowed for 'new and is never
        // exposed unpinned afterwards.
        Pin::new_unchecked(StackPinned::new(&mut *slot))
    }
}

/// `transfer_let!(x = expr)` declares an empty destination slot named `x`
/// and immediately rebinds `x` to the pinned value transferred into it.
#[macro_export]
macro_rules! transfer_let {
    ($id:ident = $fun_name:ident ($($arg:expr),*)) => {
        let mut $id = $crate::Transfer::empty();
        let $id = $fun_name($($arg),* &mut $id);
    };
    ($id:ident = $e:expr) => {
        let mut $id = $crate::Transfer::empty();
        let $id = $crate::transfer($e, &mut $id);
    };
}
#[cfg(test)]
mod tests {
    // A u64 "secret" that zeroes its storage whenever it is dropped,
    // transferred, or consumed — exercising the Transfer contract.
    mod secret {
        use std::marker::PhantomPinned;

        pub struct SecretU64(u64, PhantomPinned);

        fn secure_erase(x: &mut u64) {
            *x = 0;
        }

        use super::super::{Tr, Transfer};
        use stackpin::FromUnpinned;
        use stackpin::PinStack;

        unsafe impl<'a> FromUnpinned<&'a mut u64> for SecretU64 {
            type PinData = &'a mut u64;

            unsafe fn from_unpinned(src: &'a mut u64) -> (Self, &'a mut u64) {
                (Self(0, PhantomPinned), src)
            }

            unsafe fn on_pin(&mut self, data: &'a mut u64) {
                // Take ownership of the secret, then wipe the source slot.
                self.0 = *data;
                println!(
                    "Secure erasing data that served for construction at {:p}",
                    data
                );
                secure_erase(data);
            }
        }

        unsafe impl Transfer for SecretU64 {
            unsafe fn transfer(src: &mut PinStack<'_, Self>, dst: *mut Self) {
                // Copy the secret to the destination, then wipe the source.
                (*dst).0 = src.0;
                secure_erase(&mut src.as_mut().get_unchecked_mut().0);
                println!(
                    "Secure erasing on transfer for {:p}",
                    &mut src.as_mut().get_unchecked_mut().0
                );
            }

            fn empty() -> Tr<Self> {
                Tr::from_empty(Self(0, PhantomPinned))
            }
        }

        impl SecretU64 {
            pub fn reveal(this: &PinStack<'_, Self>) -> u64 {
                this.0
            }
        }

        impl Drop for SecretU64 {
            fn drop(&mut self) {
                // 0 doubles as the "already erased / empty" marker.
                if self.0 == 0 {
                    println!("Not erasing empty secret at {:p}", self);
                } else {
                    println!("Secure erasing in dtor for {:p}", self);
                    secure_erase(&mut self.0)
                }
            }
        }

        // Builds a pinned secret (value 42) directly into the caller's slot.
        pub fn generate_secret(slot: &mut crate::Tr<SecretU64>) -> PinStack<'_, SecretU64> {
            let mut secret = 42;
            stackpin::stack_let!(secret = stackpin::Unpinned::new(&mut secret));
            crate::transfer(secret, slot)
        }
    }

    use secret::SecretU64;

    // Transfer "outside-in": a callee constructs into the caller's slot.
    #[test]
    fn outin_transfer() {
        use secret::generate_secret;
        super::transfer_let!(my_secret = generate_secret());
        assert_eq!(SecretU64::reveal(&my_secret), 42);
    }

    fn transfer_secret(outer_secret: stackpin::PinStack<'_, secret::SecretU64>) {
        super::transfer_let!(inner_secret = outer_secret);
        assert_eq!(SecretU64::reveal(&inner_secret), 83);
    }

    // Transfer "inside-out": the caller hands its pinned value to a callee.
    #[test]
    fn inout_transfer() {
        let mut initial_secret = 83u64;
        stackpin::stack_let!(my_secret: SecretU64 = &mut initial_secret);
        transfer_secret(my_secret);
        // Construction wiped the original stack variable.
        assert_eq!(initial_secret, 0);
    }
}
<file_sep>[package]
name = "transfer"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
license = "MIT OR Apache-2.0"
description = "Crate that exposes a Transfer trait, that is to move what Clone is to copy"
repository = "https://github.com/dureuill/transfer"
documentation = "https://docs.rs/transfer"
readme = "README.md"
keywords = ["pin", "ownership", "self", "reference"]
categories = ["rust-patterns"]
edition = "2018"

[badges]
maintenance = { status = "experimental" }
[dependencies]
stackpin = "0.0.2"
| b7ca76135b0b440b8f4604fdd8059a87b04c054e | [
"Markdown",
"Rust",
"TOML"
] | 4 | Markdown | dureuill/transfer | 575e183f138f3b5af03c75a6f7b7b819e20aec49 | 22d9ebd5c75f252b1b4d6154d7e248daab3481d7 |
refs/heads/master | <file_sep>import pandas as pd
"""Train a decision tree on the Kaggle Titanic data and write predictions.

Reads train.csv / test.csv from the working directory and writes
titanic-results.csv with PassengerId / Survived columns.
"""
train = pd.read_csv("train.csv")
# NOTE(review): dropna() before dropping Cabin discards every row with a
# missing Cabin — confirm this aggressive filtering is intended.
train = train.dropna()

y = train["Survived"]
# Bug fix: the original wrote `X = train.drop(..., 1, inplace=True)`, but
# drop(inplace=True) returns None, so that assignment was a misleading
# dead statement. Drop in place, then build the feature matrix.
train.drop(["Survived", "PassengerId", "Name", "Ticket", "Cabin"], axis=1, inplace=True)
X = pd.get_dummies(train)  # one-hot encode the categorical columns

from sklearn import tree

dtc = tree.DecisionTreeClassifier()
dtc.fit(X, y)

test = pd.read_csv("test.csv")
ids = test[["PassengerId"]]
test.drop(["PassengerId", "Name", "Ticket", "Cabin"], axis=1, inplace=True)
test.fillna(2, inplace=True)  # crude imputation so predict() sees no NaNs
test = pd.get_dummies(test)

predictions = dtc.predict(test)
results = ids.assign(Survived=predictions)
results.to_csv("titanic-results.csv", index=False)
| fb1953bbf13fa9274aeb799273c745f4dfab88ad | [
"Python"
] | 1 | Python | ogyalcin/kaggle-competitions | 43c6237fa40458e279a3b0314a04a932ec4a62cf | 7f3f35a9ad98e0d5ddc13f35c8be33753a2d116e |
refs/heads/main | <repo_name>Mishka31/goit-react-hw-06-CounterTEST<file_sep>/src/App.js
import { useSelector, useDispatch } from "react-redux";
import * as actions from "./redux//counter/counter-actions.js";
import "./App.css";
export default function App() {
const value = useSelector((state) => state.counter.value);
const step = useSelector((state) => state.counter.step);
const dispatch = useDispatch();
return (
<div className="App">
<div>
<button
type="button"
onClick={() => dispatch(actions.increment(value))}
>
Increment on {step}
</button>
<h1>{value}</h1>
<button
type="button"
onClick={() => dispatch(actions.decrement(value))}
>
Dectrement on {step}
</button>
</div>
</div>
);
}
// const mapStateToProps = (state) => ({
// value: state.counter.value,
// step: state.counter.step,
// });
// const mapDispatchToProps = (dispatch) => ({
// onIncrement: (value) => dispatch(actions.increment(value)),
// onDecrement: (value) => dispatch(actions.decrement(value)),
// });
// export default connect(mapStateToProps, mapDispatchToProps)(App);
<file_sep>/src/redux/counter/counter-actions.js
import types from "./counter-type.js";
// Action creators for the counter slice. Each returns a plain FSA-style
// action whose payload carries the amount to add or subtract.
export function increment(value) {
  return { type: types.INCREMENT, payload: value };
}

export function decrement(value) {
  return { type: types.DECREMENT, payload: value };
}
<file_sep>/src/redux/counter/counter-reducer.js
import { createStore, combineReducers } from "redux";
import types from "./counter-type.js";
// Reducer for the counter's numeric value; the action payload carries the
// delta to apply.
const valueReducer = (state = 10, { type, payload }) => {
  switch (type) {
    case types.INCREMENT:
      return state + payload;
    case types.DECREMENT:
      return state - payload;
    default:
      return state;
  }
};

// The step is static configuration: this reducer ignores every action.
const stepReducer = (state = 15, action) => state;

// Combined slice shape: { value, step } (mounted as state.counter by the store).
// NOTE(review): `createStore` is imported above but unused here — presumably
// the store is created elsewhere; confirm.
export default combineReducers({
  value: valueReducer,
  step: stepReducer,
});
| 7288ff0c9977fba9d0b2bdd3e370df5b6fe221ce | [
"JavaScript"
] | 3 | JavaScript | Mishka31/goit-react-hw-06-CounterTEST | 0bbcfb898e747f003ecfdef3acada096f0a0dd07 | cf795139b2dc2f1238d518f8a678c23e4e12fcce |
refs/heads/master | <repo_name>mishapadidar/GP-Risk-Neutral<file_sep>/plot_risk_kernel.py
import numpy as np;
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import GaussianProcessRegressor
from surrogate import GaussianProcessRiskNeutral
from riskkernel import Normal_SEKernel
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import rbf_kernel
"""
Plot the once mollified risk kernel analytically and with monte carlo
"""
# once mollified kernel
def psi_MonteCarlo(kernel, r, mu, sigma, num_pts_MC):
    """Monte Carlo estimate of the once-mollified kernel.

    For each radius r[i], estimates E_U[ k(0, r[i] - U) ] where
    U ~ N(mu, sigma) is drawn once and shared across all radii.
    Returns an array of the same length as ``r``.
    """
    z = np.array([[0]])
    U = np.random.normal(mu, sigma, num_pts_MC)
    ev = np.zeros(len(r))
    for i, ri in enumerate(r):
        draws = [kernel(z, np.array([[ri - u]])) for u in U]
        ev[i] = np.mean(np.array(draws))
    return ev
# ================================
# Problem setup: evaluate the kernels along a 1-D radius r in [0, b].
dim = 1
N = 100
b = 5
r = np.linspace(0, b, N)  # radius
# GP kernel: scaled RBF plus a small white-noise term
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.01, 100)) + \
    WhiteKernel(1e-3, (1e-6, 1e-2))
# ================================
# plot the plain kernel k(0, r)
Z = np.array([[0.0]])
R = np.reshape(r, (N, 1))
plt.plot(r, kernel(Z, R)[0], label='kernel')
# ================================
# plot the risk kernel psi estimated by Monte Carlo, with U ~ N(mu, var)
mu = 0
var = 0.1
sigma = np.sqrt(var)
num_pts_MC = 1000
ev = psi_MonteCarlo(kernel, r, mu, sigma, num_pts_MC)
plt.plot(r, ev, color='g', label='psi Monte Carlo')
# ================================
# plot the analytic risk kernel for comparison (should overlay the MC curve)
riskkernel = Normal_SEKernel(var * np.eye(dim))
psi_risk = riskkernel.mollifiedx1(Z, R)[0]
plt.plot(r, psi_risk, '--', color='g', label='psi risk')
plt.title('Analytic versus Monte Carlo Risk Kernel')
plt.legend()
plt.show()
<file_sep>/tests/basic_MC_on_surrogate.py
"""
Example file for Monte-Carlo 1D
Compute the Risk-Neutral and Mean-Variance surrogates by
Monte Carlo integration on the function f
"""
import sys
sys.path.insert(1, '../')
import numpy as np
import matplotlib.pyplot as plt
from MonteCarlo1D_fun import MonteCarlo1D
#=============================================================
# Settings
#=============================================================
f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
#f = lambda x: np.exp(-(x-0.5)**2)*np.sin(30*x)
dim = 1
lb = -1.5*np.ones(dim)
ub = 1.5*np.ones(dim)
num_pts_MC = 100
#=============================================================
# Compute Monte-Carlo evaluations
#=============================================================
eta = 1;
Ntest = 100;
Xtest = np.linspace(lb,ub,Ntest).reshape((Ntest,dim))
ev,va = MonteCarlo1D(f,Xtest,Ntest,num_pts_MC)
#=============================================================
# Plot
#=============================================================
# plot true function
ftrue = np.array([f(x) for x in Xtest])
plt.plot(Xtest.flatten(),ftrue,linewidth=3, color='b',label='function')
# plot risk-neutral function
plt.plot(Xtest.flatten(),ev,linewidth=3, color='r',label='risk-neutral')
# plot risk-averse function
plt.plot(Xtest.flatten(),ev+eta*va,linewidth=3, color='orange',label='risk-averse')
# plot figure
plt.title('Measures of Risk Calculated via Monte Carlo on function')
plt.legend()
plt.show()
<file_sep>/MonteCarlo_strategies.py
"""
Function to evaluate Monte-Carlo in 1D
"""
import numpy as np
def gaussian(x, mu, sig):
    """Unnormalized Gaussian bump exp(-(x - mu)^2 / (2 sig^2)); peaks at 1."""
    z = (x - mu) / sig
    return np.exp(-0.5 * z * z)
def MonteCarlo_RN(surrogate, C, num_pts_MC, mu=0, sigma=0.1):
    """Risk-neutral objective E_U[ s(c - U) ] estimated by Monte Carlo.

    Parameters
    ----------
    surrogate : object with a ``predict(X)`` method accepting an (m, 1) array.
    C : array-like of candidate points c.
    num_pts_MC : int, number of Monte Carlo samples.
    mu, sigma : mean / std of the Gaussian perturbation U. Previously
        hard-coded to 0 and 0.1; the defaults preserve that behavior.

    Returns
    -------
    ev : np.ndarray of shape (len(C),), estimated expected values.
    """
    # One common set of perturbations is reused for every candidate point.
    U = np.random.normal(mu, sigma, num_pts_MC)
    ev = np.zeros(len(C))
    for i, c in enumerate(C):
        # evaluate the surrogate at the perturbed points c - U in one batch
        preds = surrogate.predict((c - U).reshape((num_pts_MC, 1)))
        ev[i] = np.mean(preds)
    return ev
def MonteCarlo_MV(surrogate, C, num_pts_MC, eta, mu=0, sigma=0.1):
    """Mean-variance (risk-averse) objective E[s(c-U)] + eta * Var[s(c-U)].

    Parameters
    ----------
    surrogate : object with a ``predict(X)`` method accepting an (m, 1) array.
    C : array-like of candidate points c.
    num_pts_MC : int, number of Monte Carlo samples.
    eta : float, weight on the variance (risk-aversion) term.
    mu, sigma : mean / std of the Gaussian perturbation U (defaults keep
        the previously hard-coded 0 and 0.1).

    Returns
    -------
    np.ndarray of shape (len(C),): ev + eta * va at each candidate point.
    """
    U = np.random.normal(mu, sigma, num_pts_MC)
    N_C = len(C)
    ev = np.zeros(N_C)
    va = np.zeros(N_C)
    for i, c in enumerate(C):
        preds = surrogate.predict((c - U).reshape((num_pts_MC, 1)))
        ev[i] = np.mean(preds)
        # population variance of the samples, vectorized (replaces the
        # original element-by-element loop)
        va[i] = np.mean((preds - ev[i]) ** 2)
    return ev + eta * va
<file_sep>/MonteCarlo1D_fun.py
"""
Function to evaluate Monte-Carlo in 1D
"""
import numpy as np
def gaussian(x, mu, sig):
    """Gaussian-shaped bump (unnormalized): value 1 at x == mu."""
    return np.exp(-((x - mu) ** 2) / (2.0 * sig ** 2))
def MonteCarlo1D(f, Xtest, Ntest, num_pts_MC, mu=0, sigma=0.1):
    """Monte Carlo estimates of E_U[f(x - U)] and Var_U[f(x - U)].

    U ~ N(mu, sigma) is drawn once and shared across all test points.

    Parameters
    ----------
    f : callable, evaluated at single (possibly 1-element array) points.
    Xtest : (Ntest, dim) array of evaluation points.
    Ntest : int, number of rows of Xtest to use.
    num_pts_MC : int, number of Monte Carlo samples.
    mu, sigma : perturbation distribution; defaults keep the previously
        hard-coded values 0 and 0.1.

    Returns
    -------
    (ev, va) : arrays of shape (Ntest,) with the expectation and the
        (population) variance at each test point.
    """
    U = np.random.normal(mu, sigma, num_pts_MC)
    ev = np.zeros(Ntest)
    va = np.zeros(Ntest)
    for i in range(Ntest):
        samples = np.array([f(Xtest[i] - u) for u in U]).ravel()
        ev[i] = np.mean(samples)
        # vectorized variance (replaces the original inner loop)
        va[i] = np.mean((samples - ev[i]) ** 2)
    return ev, va
def MonteCarlo1D_surrogate(surrogate, Xtest, Ntest, num_pts_MC, mu=0, sigma=0.1):
    """Same as :func:`MonteCarlo1D`, but evaluates a fitted surrogate's
    ``predict`` on all perturbed points of one location in a single batch.

    Returns (ev, va): arrays of shape (Ntest,) with the Monte Carlo
    expectation and (population) variance. mu/sigma default to the
    previously hard-coded 0 and 0.1.
    """
    U = np.random.normal(mu, sigma, num_pts_MC)
    ev = np.zeros(Ntest)
    va = np.zeros(Ntest)
    for i in range(Ntest):
        shifted = np.ones(num_pts_MC) * Xtest[i] - U
        preds = np.asarray(
            surrogate.predict(shifted.reshape((num_pts_MC, 1)))
        ).ravel()
        ev[i] = np.mean(preds)
        # vectorized variance (replaces the original inner loop)
        va[i] = np.mean((preds - ev[i]) ** 2)
    return ev, va
<file_sep>/tests/test_GP_surrogate.py
import numpy as np;
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import GaussianProcessRegressor
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import rbf_kernel
"""
Verify the fit of a GP.
Plot the Likelihood function.
"""
import sys
sys.path.insert(1, '../')
dim =1
N = 100
a = -1
b = 1
X = np.random.uniform(a,b,(N,dim))
f = lambda x: np.exp(-(x-0.5)**2)*np.sin(30*x)
#f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
fX = f(X).flatten();
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.1, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = GaussianProcessRegressor(kernel =kernel,n_restarts_optimizer=3)
#================================
# Plot a GP
surrogate.fit(X,fX)
M = 400
xx = np.linspace(a,b,M)
m,v = surrogate.predict(xx.reshape((M,dim)), True)
m = m.flatten()
v = v.flatten()
plt.plot(xx,m,label='GP')
plt.fill_between(xx,m,m+v,color='b',alpha=0.5)
plt.fill_between(xx,m,m-v,color='b',alpha=0.5)
plt.scatter(X.flatten(),fX,color='k',label='data')
plt.legend()
plt.show()
<file_sep>/bayesopt.py
import numpy as np
"""
Bayesian Optimization
ex:
from experimental_design import SymmetricLatin Hypercube as SLHC
from strategy import randomSample
f = lambda x: np.linalg.norm(x)
dim = 2
Sigma = np.eye(dim)
max_evals = 50
lb = -10*np.ones(dim)
ub = 10*np.ones(dim)
num_pts = 2*dim + 1
exp_design = SLHC(dim, num_pts)
strategy = randomSample(lb,ub)
problem = BayesianOptimization(f, dim, Sigma, max_evals, exp_design, strategy, surrogate, lb, ub)
xopt,fopt = problem.minimize()
"""
class BayesianOptimization():
    """Surrogate-based Bayesian optimization driver.

    Parameters
    ----------
    f : callable R^dim -> R, function handle to minimize
    dim : int, problem dimension
    max_evals : int, total budget of function evaluations (the initial
        design counts against it)
    exp_design : experimental design object,
        e.g. SymmetricLatinHypercube(dim, num_pts)
    strategy : acquisition strategy object, e.g. randomSample(lb, ub)
    surrogate : surrogate model exposing fit/update/predict and X/fX
    lb, ub : np.array(), lower / upper bounds of the box domain

    Example
    -------
    >>> exp_design = SLHC(dim, num_pts)
    >>> strategy = randomSample(lb, ub)
    >>> problem = BayesianOptimization(f, dim, max_evals, exp_design,
    ...                                strategy, surrogate, lb, ub)
    >>> xopt, fopt = problem.minimize()

    Doc fix: an earlier docstring described a ``Sigma`` covariance argument
    and passed it in the example, but the constructor takes no such
    parameter (perturbation covariance lives in the surrogate's kernel).
    """

    def __init__(self, f, dim, max_evals, exp_design, strategy, surrogate, lb, ub):
        self.f = f
        self.dim = dim
        self.max_evals = max_evals
        self.exp_des = exp_design
        self.strategy = strategy
        self.surrogate = surrogate
        self.lb = lb
        self.ub = ub
        self.X = None   # evaluated points, filled by minimize()
        self.fX = None  # corresponding function values

    # solve the optimization problem
    def minimize(self):
        """Run the optimization loop.

        Returns (xopt, fopt): the best evaluated point as judged by the
        surrogate's predictions (not by the raw function evaluations).
        """
        # initial space-filling sample
        X = self.exp_des.generate_points(self.lb, self.ub)
        # evaluate f at X; ensure each result is a scalar, not an array
        fX = np.array([float(self.f(x)) for x in X])
        evals = len(fX)
        # fit the surrogate to the initial design
        self.surrogate.fit(X, fX)
        # adaptive phase: spend the remaining budget one point at a time
        for _ in range(self.max_evals - evals):
            # choose the next point with the acquisition strategy
            xi = self.strategy.generate_evals(self.surrogate)
            fi = float(self.f(xi))
            self.surrogate.update(np.atleast_2d(xi), fi)
        # save the evaluation history
        self.X = self.surrogate.X
        self.fX = self.surrogate.fX
        # return the surrogate's best prediction over the evaluated points
        f_surr = self.surrogate.predict(self.X)
        iopt = np.argmin(f_surr)
        fopt = f_surr[iopt]
        xopt = self.X[iopt]
        return xopt, fopt
<file_sep>/examples/BO_RiskNeutral.py
"""
sequentially plot expected improvement
Plot the expected improvement, Risk-Neutral Surrogate
and function.
The variable II between [1,max_evals] allows you to look
at what the GP and expected improvement looked like
after the first II points were evaluated.
You can also change the strategy in the Bayesian Optimization
Section. There are currently three options: randomStrategy,
EIStrategy, POIStrategy (random sampling, expected improvement
probability of improvement)
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from bayesopt import BayesianOptimization
from riskkernel import Normal_SEKernel
from surrogate import GaussianProcessRiskNeutral
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from strategy import RandomStrategy, EIStrategy, POIStrategy
from experimental_design import SymmetricLatinHypercube as SLHC
import matplotlib.pyplot as plt
# number of points used in plots
# use II < max_evals
II = 35
#=============================================================
# Run Bayesian Optimization
#=============================================================
f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
#f = lambda x: np.exp(-(x-0.5)**2)*np.sin(30*x)
dim = 1
max_evals = 60
Sigma = 0.01*np.eye(dim)
lb = -1.5*np.ones(dim)
ub = 1.5*np.ones(dim)
num_pts = 25*dim + 1 # initial evaluations
exp_design = SLHC(dim, num_pts)
#strategy = POIStrategy(lb,ub)
#strategy = RandomStrategy(lb,ub)
strategy = EIStrategy(lb,ub)
kernel = Normal_SEKernel(Sigma)
# customizing bounds for the kernel hyperparams
# the following line can be ommitted.
kernel.GPkernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (1e-2, 2)) + \
WhiteKernel(1e-3, (1e-6, 1e-1))
surrogate = GaussianProcessRiskNeutral(kernel)
# initialize the problem
problem = BayesianOptimization(f,dim, max_evals, exp_design, strategy, surrogate,lb, ub)
# solve it
xopt,fopt = problem.minimize()
#=============================================================
# Plot
#=============================================================
# get the function evaluations
X = problem.X
fX = problem.fX
Ntest = 300
Xtest = np.linspace(lb,ub,Ntest).reshape((Ntest,dim))
# fit the surrogate to the first II points
surrogate.fit(X[:II],fX[:II])
# predict the risk neutral and standard error
ftest, std = surrogate.predict(Xtest, std=True)
#compute aquisition function
args = [surrogate]
acquisition = []
for x in Xtest:
acquisition.append(strategy.objective(x,args))
# plot acquisition function
plt.plot(Xtest.flatten(),acquisition,color='red',label='acquisition function')
# plot Next Evaluation
plt.scatter(X[II+1],fX[II+1],color='red',s = 150,marker=(5,1), label='Next Evaluation')
# plot gp
plt.plot(Xtest.flatten(),surrogate.GP.predict(Xtest),color='green',label='GP')
# plot surrogate
plt.plot(Xtest.flatten(),ftest,linewidth=3, color='orange',label='GPRN')
# plot the 95% confidence interval
plt.fill_between(Xtest.flatten(),ftest-1.96*std,ftest+1.96*std,alpha=0.3)
# plot true function
ftrue = np.array([f(x) for x in Xtest])
plt.plot(Xtest.flatten(),ftrue,linewidth=3, color='b',label='function')
# plot data
plt.scatter(X[:II].flatten(),fX[:II],color='k', label='data')
plt.title('Bayesian Optimization Under Uncertainty')
plt.legend()
plt.show()
<file_sep>/tests/test_EI_2D.py
"""
Test EI strategy in 2 dimensions
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from bayesopt import BayesianOptimization
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from riskkernel import Normal_SEKernel
from surrogate import GaussianProcessRegressor
from strategy import RandomStrategy, EIStrategy, POIStrategy, SRBFStrategy
from experimental_design import SymmetricLatinHypercube as SLHC
import matplotlib.pyplot as plt
#=============================================================
# Run Bayesian Optimization
#=============================================================
# function f: R^n -> R
f = lambda x: np.sqrt((x[0]-0.382)**2 + x[1]**2)
g = lambda x,y: np.sqrt((x-0.382)**2 + y**2)
#f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
# basic info
dim = 2
lb = -np.ones(dim)
ub = np.ones(dim)
max_evals = 50
# experimental design
num_pts = 12*dim + 1 # initial evaluations
exp_design = SLHC(dim, num_pts)
# strategy
strategy = POIStrategy(lb,ub)
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.01, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = GaussianProcessRegressor(kernel =kernel)
# initialize the problem
problem = BayesianOptimization(f,dim, max_evals, exp_design, strategy, surrogate,lb, ub)
# solve it
xopt,fopt = problem.minimize()
#=============================================================
# Plot
#=============================================================
Ntest = 300
x = np.linspace(lb[0],ub[0],Ntest)
y = np.linspace(lb[1],ub[1],Ntest)
X,Y = np.meshgrid(x,y)
# plot function
plt.contour(X,Y,g(X,Y))
#plt.plot(np.linspace(-1,1,Ntest),f(np.linspace(-1,1,Ntest)))
# plot optimum
plt.scatter(xopt[0],xopt[1],color='red',s = 150,marker=(5,1), label='Optimum')
plt.title('Bayesian Optimization')
plt.legend()
plt.show()
<file_sep>/tests/test_GP_2D.py
"""
fit the GP on 2D data
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from experimental_design import SymmetricLatinHypercube as SLHC
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import GaussianProcessRegressor
import matplotlib.pyplot as plt
# function f: R^n -> R
f = lambda x: x[0]**2 *(4-2.1*x[0]**2 + x[0]**4/3) + x[0]*x[1] + (-4+4*x[1]**2)*x[1]**2
# just for plotting...easier to evaluate with meshgrid
g = lambda x,y: x**2 *(4-2.1*x**2 + x**4/3) + x*y + (-4+4*y**2)*y**2
# basic info
dim = 2
lb = np.array([-3.0,-2.0])
ub = np.array([3.0,2.0])
# experimental design
num_pts = 500 # initial evaluations
exp_design = SLHC(dim, num_pts)
X = exp_design.generate_points(lb,ub)
fX = [float(f(x)) for x in X]
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.01, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = GaussianProcessRegressor(kernel =kernel)
surrogate.fit(X,fX)
#=============================================================
# Plot
#=============================================================
# plot the function
Ntest = 500
x = np.linspace(lb[0],ub[0],Ntest)
y = np.linspace(lb[1],ub[1],Ntest)
X,Y = np.meshgrid(x,y)
Z = g(X,Y)
plt.contour(X,Y,Z,range(1,5000,30))
# plot the GP
X_grid = np.c_[ np.ravel(X), np.ravel(Y) ]
zz = surrogate.predict(X_grid)
zz = zz.reshape(X.shape)
#plt.contour(X,Y,zz-Z)
plt.colorbar()
plt.title('Difference Between GP and 6 Hump Camel')
plt.legend()
plt.show()
<file_sep>/strategy.py
import numpy as np
from scipy.stats import norm
from scipy.optimize import minimize
class RandomStrategy():
    """Acquisition strategy that proposes a uniformly random point.

    The surrogate argument of generate_evals is accepted for interface
    compatibility with the other strategies but is ignored; the next
    evaluation is drawn uniformly from the box [lb, ub].
    """

    def __init__(self, lb, ub):
        # box bounds of the search domain (1D arrays)
        self.lb = lb
        self.ub = ub

    def generate_evals(self, surrogate):
        """Return the next evaluation point as a 1D array."""
        lower, upper = self.lb, self.ub
        return np.random.uniform(lower, upper)
class EIStrategy():
    """
    Choose the next point by maximizing expected improvement,
    equation (35) of Jones (2001).
    generate_evals output: 1D-array
    """

    def __init__(self, lb, ub):
        self.lb = lb
        self.ub = ub
        self.num_multistart = 10  # number of random SLSQP restarts

    def objective(self, xx, args):
        """Expected improvement at a single point.

        xx: evaluation point as a 1D-array
            (this function cannot evaluate a batch of points)
        args: [surrogate]
        returns: float, EI(xx) >= 0
        """
        # unpack surrogate from optimizer
        surrogate = args[0]
        # surrogate needs 2D-array input
        xx = np.atleast_2d(xx)
        # predict surrogate; collapse the size-1 arrays to scalars
        y, s = surrogate.predict(xx, std=True)
        y = float(np.ravel(y)[0])
        s = float(np.ravel(s)[0])
        # EI is defined as 0 where the predictive deviation vanishes
        # (avoids a division by zero below)
        if s <= 0.0:
            return 0.0
        # best point so far, in terms of surrogate (not function) value
        fmin = float(np.min(surrogate.predict(surrogate.X)))
        u = (fmin - y) / s
        Phi = norm.cdf(u)  # standard normal cdf at u
        phi = norm.pdf(u)  # standard normal pdf at u
        return s * (u * Phi + phi)

    def negobjective(self, xx, args):
        """Negative expected improvement (for use with a minimizer).

        xx: evaluation point as a 1D-array (one point only)
        args: [surrogate]
        """
        return -self.objective(xx, args)

    def generate_evals(self, surrogate):
        """Maximize expected improvement with multistart SLSQP.

        input: surrogate
        output: 1D-array, the next evaluation point

        Bug fix: best_val is now updated inside the loop so the best of
        the multistart solutions is returned; previously best_val stayed
        at +inf and every start overwrote the candidate, i.e. the *last*
        start always won. A random fallback also prevents an unbound
        next_pt when every start produces a NaN objective.
        """
        best_val = np.inf
        next_pt = None
        args = [surrogate]
        bounds = list(zip(self.lb, self.ub))
        for _ in range(self.num_multistart):
            # random 1D-array initial guess in the box
            x0 = np.random.uniform(self.lb, self.ub)
            # MAXIMIZE expected improvement (minimize its negative)
            sol = minimize(self.negobjective, x0, args=args,
                           method='SLSQP', bounds=bounds)
            if sol.fun < best_val:
                best_val = sol.fun
                next_pt = sol.x
        if next_pt is None:
            # all starts failed; fall back to a random point
            next_pt = np.random.uniform(self.lb, self.ub)
        return next_pt
class POIStrategy():
    """Optimize Probability of Improvement,
    equation (35) of Jones (2001), to choose the next evaluation point.
    generate_evals output: 1D-array
    """

    def __init__(self, lb, ub):
        self.lb = lb
        self.ub = ub
        self.num_multistart = 10  # number of random SLSQP restarts
        self.alpha = 0.001  # fractional improvement desired over the incumbent

    def objective(self, xx, args):
        """Probability of improving the incumbent by a factor alpha.

        xx: evaluation point as a 1D-array
            (this function cannot evaluate a batch of points)
        args: [surrogate]
        returns: float in [0, 1], the probability of improvement at xx
        """
        # unpack surrogate from optimizer
        surrogate = args[0]
        # surrogate needs 2D-array input
        xx = np.atleast_2d(xx)
        # predict surrogate; collapse the size-1 arrays to scalars
        y, s = surrogate.predict(xx, std=True)
        y = float(np.ravel(y)[0])
        s = float(np.ravel(s)[0])
        # best point so far, in terms of surrogate (not function) value
        fmin = float(np.min(surrogate.predict(surrogate.X)))
        # improvement goal
        fgoal = (1.0 - self.alpha) * fmin
        # degenerate predictive deviation: improvement is certain or impossible
        if s <= 0.0:
            return 1.0 if y < fgoal else 0.0
        u = (fgoal - y) / s
        return norm.cdf(u)  # Phi(u): standard normal cdf at u

    def negobjective(self, xx, args):
        """Negative probability of improvement (for use with a minimizer).

        xx: evaluation point as a 1D-array (one point only)
        args: [surrogate]
        """
        return -self.objective(xx, args)

    def generate_evals(self, surrogate):
        """Maximize probability of improvement with multistart SLSQP.

        input: surrogate
        output: 1D-array, the next evaluation point

        Bug fix: best_val is now updated inside the loop so the best of
        the multistart solutions is returned; previously best_val stayed
        at +inf and the *last* start always overwrote the candidate. A
        random fallback also prevents an unbound next_pt when every
        start produces a NaN objective.
        """
        best_val = np.inf
        next_pt = None
        args = [surrogate]
        bounds = list(zip(self.lb, self.ub))
        for _ in range(self.num_multistart):
            # random 1D-array initial guess in the box
            x0 = np.random.uniform(self.lb, self.ub)
            # MAXIMIZE probability of improvement (minimize its negative)
            sol = minimize(self.negobjective, x0, args=args,
                           method='SLSQP', bounds=bounds)
            if sol.fun < best_val:
                best_val = sol.fun
                next_pt = sol.x
        if next_pt is None:
            # all starts failed; fall back to a random point
            next_pt = np.random.uniform(self.lb, self.ub)
        return next_pt
class SRBFStrategy():
    """
    Global Metric SRBF strategy from
    Regis R, Shoemaker C, (2007) A Stochastic
    Radial Basis Function Method for the Global Optimization of
    Expensive Functions. INFORMS Journal on Computing 19(4):497-509.

    Cycles through weights to trade off local (response-surface) vs
    global (distance) search. Returns the next evaluation as a 1D array.

    Bug fix: the weight cycle now actually advances — the update used to
    assign to self.wi (a typo) so weight_index was stuck at 0 forever.
    """

    def __init__(self, lb, ub, num_candidates=10):
        self.lb = lb
        self.ub = ub
        self.num_candidates = num_candidates
        # weight cycle: 0 = pure exploration, 1 = pure exploitation
        self.cycle_length = 5
        self.weights = np.linspace(0, 1, self.cycle_length)
        self.weight_index = 0  # position in the weight cycle

    def generate_evals(self, surrogate):
        """Score random candidates and return the best as a 1D array."""
        # generate random candidate points in the box
        dim = surrogate.dim
        C = np.random.uniform(self.lb, self.ub, (self.num_candidates, dim))
        # surrogate predictions at the candidates
        fC = surrogate.predict(C)
        df = max(fC) - min(fC)
        # minimum distance from each candidate to the evaluated points
        D = np.array([min(np.linalg.norm(c - surrogate.X, axis=1)) for c in C]).flatten()
        dD = max(D) - min(D)
        # response-surface criterion, normalized to [0, 1] (low = good value)
        if df == 0.0:
            VR = np.ones(len(C))
        else:
            VR = (fC - min(fC)) / df
        # distance criterion, normalized to [0, 1] (low = far from data)
        if dD == 0.0:
            VD = np.ones(len(C))
        else:
            VD = (max(D) - D) / dD
        # weighted score; minimizer balances exploitation and exploration
        w = self.weights[self.weight_index]
        score = w * VR + (1 - w) * VD
        iopt = np.argmin(score)
        xopt = C[iopt]
        # advance the weight cycle for the next call (was: self.wi = ...)
        self.weight_index = (self.weight_index + 1) % self.cycle_length
        return xopt
<file_sep>/surrogate.py
# risk-neutral GP example
# gaussian kernel
import numpy as np
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from sklearn.gaussian_process import GaussianProcessRegressor as GPR
from scipy.optimize import minimize
import cvxpy as cp
class GaussianProcessRegressor():
    """Thin wrapper around sklearn's GaussianProcessRegressor.

    Keeps the training data (X, fX) alongside the fitted model and
    exposes the fit/predict/update interface the optimizer expects.
    """

    def __init__(self, kernel, n_restarts_optimizer=0):
        self.dim = 0    # input data dimension
        self.X = None   # data points
        self.fX = None  # function evals
        self.GP = GPR(kernel=kernel, n_restarts_optimizer=n_restarts_optimizer)

    def fit(self, X, fX):
        """Store the data and refit the underlying sklearn GP."""
        self.X = X
        self.fX = fX
        self.dim = X.shape[1]
        self.GP.fit(X, fX)

    def predict(self, xx, std=False):
        """Predict at the 2D array xx; also return std errors if std=True."""
        return self.GP.predict(xx, return_std=std)

    def update(self, xx, yy):
        """Append one new observation and refit.

        xx: 2D-array (a single row)
        yy: scalar function value
        """
        self.X = np.vstack((self.X, xx))
        self.fX = np.concatenate((self.fX, [yy]))
        self.fit(self.X, self.fX)
class GaussianProcessRiskNeutral():
    """Risk-neutral surrogate built from a GP and a mollified kernel.

    Fits a standard GP to (X, fX), then predicts the risk-neutral value
    (expectation over a perturbation U) in closed form using the once-
    and twice-mollified kernel matrices supplied by RiskKernel.
    """
    def __init__(self,RiskKernel):
        self.dim = 0; # input data dimension
        self.X = None; # data points
        self.fX = None; # function evals
        self.RiskKernel = RiskKernel;
        self.GP = GPR(kernel=RiskKernel.GPkernel)
    # "fit" a GP to the data
    def fit(self, X,fX):
        # update data
        self.X = X;
        self.fX = fX;
        self.dim = X.shape[1]
        # fit the GP
        self.GP.fit(X,fX)
        # update the kernel with the tuned hyperparams
        self.RiskKernel.updatekernel(self.GP.kernel_)
    """
    Predict by:
    1. Fit a GP to the data (X,fX)
    2. Compute the single and double convolutional kernel
    matrices.
    3. Predict by computing the posterior GP fhat|fX, the
    risk-neutral GP conditioned on the GP on f.
    xx: 2d np.array; prediction points
    std: Bool; return standard error if True
    """
    def predict(self, xx, std = False):
        # ensure GP is trained
        #self.fit(self.X,self.fX);
        # once mollified kernel matrix
        Psi = self.RiskKernel.mollifiedx1(self.X, xx)
        # twice mollified kernel matrix
        Psihat = self.RiskKernel.mollifiedx2(xx)
        # compute the predictive mean and covariance
        # NOTE(review): GP.L_ is sklearn's *private* Cholesky factor of
        # K(X, X) + noise; the two triangular solves below implement
        # (L L^T)^{-1} fX. This also assumes fit() was called first and
        # that sklearn's y-normalization is off — TODO confirm.
        m = Psi.T @ np.linalg.solve(self.GP.L_.T,np.linalg.solve(self.GP.L_,self.fX));
        if std is False:
            # return the mean
            return m
        else:
            # return mean and standard error (diagonal of the posterior cov)
            K = Psihat - Psi.T @ np.linalg.solve(self.GP.L_.T,np.linalg.solve(self.GP.L_, Psi))
            v = np.sqrt(np.diag(K))
            return m, v
    # update gp with new points (single row xx, scalar yy) and refit
    def update(self, xx,yy):
        self.X = np.vstack((self.X,xx))
        self.fX = np.concatenate((self.fX,[yy]))
        self.fit(self.X, self.fX)
class MCRiskNeutral():
    """Risk-neutral surrogate computed by Monte Carlo on the GP.

    For each query x the surrogate reports the sample mean of the GP
    predictive mean at perturbed points x - U, U ~ p.

    p: function handle for generating perturbations; takes an integer
       count and returns a 2D array of perturbation vectors
    num_points_MC: number of Monte Carlo samples per query point
    """

    def __init__(self, kernel, p, num_points_MC=1000):
        self.dim = 0    # input data dimension
        self.X = None   # data points
        self.fX = None  # function evals
        self.GP = GPR(kernel=kernel)  # gaussian process
        self.p = p      # generates perturbations U
        self.num_points_MC = num_points_MC  # number of points for monte carlo

    def fit(self, X, fX):
        """Store the data and fit the underlying sklearn GP."""
        self.X = X
        self.fX = fX
        self.dim = X.shape[1]
        self.GP.fit(X, fX)

    def predict(self, xx, std=False):
        """Predict the risk-neutral value at each row of the 2D array xx.

        Raises ValueError if std=True: this surrogate has no standard
        error. (Previously the code printed a message and killed the
        whole interpreter with quit().)
        """
        if std:
            raise ValueError("MCRiskNeutral surrogate does not provide a standard error")
        N = np.shape(xx)[0]
        frn = np.zeros(N)
        for i in range(N):
            # Monte Carlo estimate of E[f(x - U)] under the GP mean
            # (note the minus sign; CVaR below uses x + U)
            U = self.p(self.num_points_MC)
            frn[i] = np.mean(self.GP.predict(xx[i] - U))
        return frn

    def update(self, xx, yy):
        """Append one new observation (xx: 2D-array, yy: scalar) and refit."""
        self.X = np.vstack((self.X, xx))
        self.fX = np.concatenate((self.fX, [yy]))
        self.fit(self.X, self.fX)
class CVaR():
    """CVaR/VaR surrogate computed by Monte Carlo on the GP.

    p: function handle for the perturbation distribution; takes an
       integer count and returns a 2D array of perturbation vectors
    beta: confidence level for CVaR/VaR (0 < beta < 1)
    num_points_MC: number of Monte Carlo samples per query point
    """

    def __init__(self, kernel, p, beta=0.95, num_points_MC=1000):
        self.dim = 0    # input data dimension
        self.X = None   # data points
        self.fX = None  # function evals
        self.GP = GPR(kernel=kernel)  # gaussian process
        self.p = p      # pdf sampler for U
        self.beta = beta  # confidence level for CVaR
        self.num_points_MC = num_points_MC  # number of points for monte carlo

    def fit(self, X, fX):
        """Store the data and fit the underlying sklearn GP."""
        self.X = X
        self.fX = fX
        self.dim = X.shape[1]
        self.GP.fit(X, fX)

    def predict(self, xx, std=False):
        """Predict CVaR_beta of f(x + U) at each row of the 2D array xx.

        Raises ValueError if std=True: this surrogate has no standard
        error. (Previously the code printed a message and killed the
        whole interpreter with quit().)
        """
        if std:
            raise ValueError("CVaR surrogate does not provide a standard error")
        N = np.shape(xx)[0]
        C = np.zeros(N)
        for i in range(N):
            # Monte Carlo sample of f(x + U) under the GP mean
            U = self.p(self.num_points_MC)
            S = np.sort(self.GP.predict(xx[i] + U))
            # index of the empirical beta-quantile (the VaR); clamp so
            # beta close to 1 cannot index past the end of the sample
            # (ceil(n*beta) == n was an IndexError before)
            I = min(int(np.ceil(self.num_points_MC * self.beta)),
                    self.num_points_MC - 1)
            # CVaR: VaR plus the average excess above it
            C[i] = S[I] + np.sum(S[I + 1:] - S[I]) / (1. - self.beta) / self.num_points_MC
        return C

    def update(self, xx, yy):
        """Append one new observation (xx: 2D-array, yy: scalar) and refit."""
        self.X = np.vstack((self.X, xx))
        self.fX = np.concatenate((self.fX, [yy]))
        self.fit(self.X, self.fX)
class MeanVariance():
    """Mean-plus-variance (risk-averse) surrogate via Monte Carlo on the GP.

    For each query x the surrogate reports
        mean(f(x - U)) + eta * var(f(x - U)),   U ~ p,
    where f is the GP predictive mean.

    p: function handle for generating perturbations; takes an integer
       count and returns a 2D array of perturbation vectors
    eta: weight on the variance term (eta = 0 recovers the risk-neutral mean)
    num_points_MC: number of Monte Carlo samples per query point
    """

    def __init__(self, kernel, p, eta=1.0, num_points_MC=1000):
        self.dim = 0    # input data dimension
        self.X = None   # data points
        self.fX = None  # function evals
        self.GP = GPR(kernel=kernel)  # gaussian process
        self.p = p      # generates perturbations U
        self.eta = eta
        self.num_points_MC = num_points_MC  # number of points for monte carlo

    def fit(self, X, fX):
        """Store the data and fit the underlying sklearn GP."""
        self.X = X
        self.fX = fX
        self.dim = X.shape[1]
        self.GP.fit(X, fX)

    def predict(self, xx, std=False):
        """Predict mean + eta*variance at each row of the 2D array xx.

        Raises ValueError if std=True: this surrogate has no standard
        error. (Previously the code printed a message — naming the wrong
        class, MCRiskNeutral — and killed the interpreter with quit().)
        """
        if std:
            raise ValueError("MeanVariance surrogate does not provide a standard error")
        N = np.shape(xx)[0]
        meanVar = np.zeros(N)
        for i in range(N):
            # Monte Carlo sample of f(x - U) under the GP mean
            U = self.p(self.num_points_MC)
            f = self.GP.predict(xx[i] - U)
            meanVar[i] = np.mean(f) + self.eta * np.var(f)
        return meanVar

    def update(self, xx, yy):
        """Append one new observation (xx: 2D-array, yy: scalar) and refit."""
        self.X = np.vstack((self.X, xx))
        self.fX = np.concatenate((self.fX, [yy]))
        self.fit(self.X, self.fX)
<file_sep>/examples/BO_MCRiskNeutral.py
"""
Perform Bayesian Optimization with the Risk Neutral
surrogate computed via Monte Carlo on a GP for the
function.
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from bayesopt import BayesianOptimization
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import MCRiskNeutral
from strategy import SRBFStrategy
from experimental_design import SymmetricLatinHypercube as SLHC
import matplotlib.pyplot as plt
#=============================================================
# Run Bayesian Optimization
#=============================================================
# basic info
f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
dim = 1
max_evals = 80
Sigma = 0.01*np.eye(dim)
lb = -1.5*np.ones(dim)
ub = 1.5*np.ones(dim)
# experimental design
num_pts = 15*dim + 1 # initial evaluations
exp_design = SLHC(dim, num_pts)
# strategy
strategy = SRBFStrategy(lb,ub)
# uncertainty information
mu = 0.0
sigma = np.sqrt(0.01)
p = lambda num_pts: np.random.normal(mu, sigma, (num_pts,dim))
# Monte Carlo parameters
num_points_MC = 5000
# surrogate
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.1, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = MCRiskNeutral(kernel, p, num_points_MC)
# initialize the problem
problem = BayesianOptimization(f,dim, max_evals, exp_design, strategy, surrogate,lb, ub)
# solve it
xopt,fopt = problem.minimize()
#=============================================================
# Plot
#=============================================================
# test points
Ntest = 300;
Xtest = np.linspace(lb,ub,Ntest).reshape((Ntest,dim))
# plot final Mean-Variance surrogate
plt.plot(Xtest.flatten(),surrogate.predict(Xtest),linewidth=2, color='r',label='MC Risk-Neutral')
# plot true function
plt.plot(Xtest.flatten(),f(Xtest.flatten()),linewidth=2, color='b',label='function')
# plot GP
plt.plot(Xtest.flatten(),surrogate.GP.predict(Xtest),linewidth=2, color='orange',label='GP')
# plot solution to Mean Variance Bayesian Optimization
plt.scatter(xopt,fopt,color='red',s = 150,marker=(5,1),label='Best Evaluation')
# plot figure
plt.title('Monte Carlo Risk-Neutral Surrogate')
plt.legend()
plt.show()
<file_sep>/tests/basic_MC_on_function.py
"""
Generate Risk Neutral and Mean-Variance surrogates using
Monte Carlo on the surrogate
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import GaussianProcessRegressor
import matplotlib.pyplot as plt
from MonteCarlo1D_fun import MonteCarlo1D_surrogate
#=============================================================
# Setting
#=============================================================
f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
N = 35
dim = 1
lb = -1.5*np.ones(dim)
ub = 1.5*np.ones(dim)
num_pts_MC = 100
X = np.linspace(lb,ub,N).reshape((N,dim))
fX = f(X).flatten();
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.1, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = GaussianProcessRegressor(kernel =kernel,n_restarts_optimizer=3)
surrogate.fit(X,fX)
#=============================================================
# Compute Monte-Carlo evaluations
#=============================================================
eta = 1;
Ntest = 100;
Xtest = np.linspace(lb,ub,Ntest).reshape((Ntest,dim))
# Monte-Carlo on surrogate
ev,va = MonteCarlo1D_surrogate(surrogate,Xtest,Ntest,num_pts_MC)
#=============================================================
# Plot
#=============================================================
# plot true function
ftrue = np.array([f(x) for x in Xtest])
plt.plot(Xtest.flatten(),ftrue,linewidth=2, color='b',label='function')
# plot risk-neutral function
plt.plot(Xtest.flatten(),ev,linewidth=2, color='r',label='risk-neutral')
# plot risk-averse function
plt.plot(Xtest.flatten(),ev+eta*va,linewidth=2, color='orange',label='risk-averse')
# plot GP
m = surrogate.predict(Xtest.reshape((Ntest,dim)), False)
m = m.flatten()
plt.plot(Xtest.flatten(),m,linewidth=2, color='k',label='GP')
# plot figure
plt.title('Monte-Carlo on surrogate')
plt.legend()
plt.show()
<file_sep>/tests/test_MeanVariance_surrogate.py
"""
Plot the CVaR function
"""
import sys
sys.path.insert(1, '../')
import numpy as np
from bayesopt import BayesianOptimization
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
from surrogate import MeanVariance
from strategy import SRBFStrategy
from experimental_design import SymmetricLatinHypercube as SLHC
import matplotlib.pyplot as plt
#=============================================================
# Set up the problem
#=============================================================
f = lambda x: -np.exp(-100*(x-0.8)**2)+np.exp(-2*(x-1)**2)+np.exp(-2*(x+1.5)**2)
# basic info
dim = 1
N = 40
a = -1.5
b = 1.5
X = np.linspace(a,b,N)
fX = f(X)
# probability of U
mu = 0.0
sigma = np.sqrt(0.01)
p = lambda num_pts: np.random.normal(mu, sigma, (num_pts,dim))
num_points_MC = 5000
eta = 0.5
# surrogate
kernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (0.01, 100)) + \
WhiteKernel(1e-3, (1e-6, 1e-2))
surrogate = MeanVariance(kernel, p,eta, num_points_MC=num_points_MC)
# fit the surrogate
surrogate.fit(X.reshape((N,dim)),fX)
#=============================================================
# Predict the Mean Variance
#=============================================================
# test points
Ntest = 300
Xtest = np.linspace(a,b,Ntest)
# predict the surrogate
ftest = surrogate.predict(Xtest.reshape((Ntest,dim)), std=False)
plt.plot(Xtest,ftest,linewidth=3,color='orange',label='Mean-Variance')
# plot GP
plt.plot(Xtest,surrogate.GP.predict(Xtest.reshape((Ntest,1))),linewidth=2, color='green',label='GP')
# plot true function
plt.plot(Xtest,f(Xtest),linewidth=3, color='b',label='function')
# plot data
plt.scatter(X,fX,color='k', label='data')
plt.title('Mean-Variance')
plt.legend()
plt.show()
<file_sep>/riskkernel.py
import numpy as np
from abc import ABC, abstractmethod
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.gaussian_process.kernels import WhiteKernel, ConstantKernel, RBF
import scipy.spatial.distance
from sklearn.metrics.pairwise import euclidean_distances
# kernels class
class Kernel(ABC):
    """Abstract base class for a risk (mollified) kernel.

    Subclasses wrap a GP kernel and provide the once- and twice-
    mollified kernel matrices used by the risk-neutral surrogate.

    :ivar hyperparams: list of kernel hyperparameters
    """
    def __init__(self): # pragma: no cover
        self.hyperparams = []
    @abstractmethod
    def updatekernel(self, kernel): # pragma: no cover
        """Replace the wrapped GP kernel (e.g. after hyperparameter tuning).

        :param kernel: fitted kernel object to adopt
        """
        pass
    @abstractmethod
    def get_hyperparameters(self):
        """ Return the hyperparameters as a list"""
        pass
    @abstractmethod
    def mollifiedx1(self, X,Y): # pragma: no cover
        """Once-mollified kernel matrix between point sets X and Y.

        :param X: array of n points
        :param Y: array of m points
        :return: n x m array of kernel values
        :rtype: numpy.ndarray
        """
        pass
    @abstractmethod
    def mollifiedx2(self, X): # pragma: no cover
        """Twice-mollified kernel matrix on the point set X.

        :param X: array of n points
        :return: n x n array of kernel values
        :rtype: numpy.ndarray
        """
        pass
class Normal_SEKernel(Kernel):
    """Squared-exponential kernel convolved with a Normal(0, Sigma) pdf.

    Wraps a ConstantKernel * RBF + WhiteKernel GP kernel and provides
    the once/twice-mollified (convolved) kernel matrices in closed form.
    Sigma is the covariance of the Gaussian perturbation U.
    """
    def __init__(self, Sigma):
        super().__init__()
        # squared exponential kernel for GP
        self.GPkernel = ConstantKernel(1, (1e-3, 1e3)) * RBF(1, (1e-3, 100)) + \
            WhiteKernel(1e-3, (1e-6, 1e-1))
        self.Sigma = Sigma
        self.dim = Sigma.shape[1]
    def updatekernel(self,kernel):
        """ update the GPkernel with a new kernel
        """
        self.GPkernel = kernel
    def get_hyperparameters(self):
        """Extract (constant_value, length_scale, noise_level) from GPkernel.

        NOTE(review): the k1__k1__... parameter paths assume the kernel
        is exactly ConstantKernel * RBF + WhiteKernel; any other kernel
        structure breaks these lookups — confirm before swapping kernels.
        """
        theta0 = self.GPkernel.get_params()['k1__k1__constant_value']
        theta1 = self.GPkernel.get_params()['k1__k2__length_scale']
        theta2 = self.GPkernel.get_params()['k2__noise_level']
        return np.array([theta0,theta1,theta2])
    def mollifiedx1(self, X,Y):
        """ Once mollified kernel
        """
        # get optimized hyperparameters
        theta = self.get_hyperparameters()
        # A = (length_scale^2) I; B = A + Sigma (single convolution)
        A = (theta[1]**2)*np.eye(self.dim);
        B = A + self.Sigma
        Binv = np.linalg.inv(B); # inv(A + Sigma)
        # normalizing constant of the convolved Gaussian
        c = theta[0]*np.sqrt(np.linalg.det(A)/np.linalg.det(B))
        # kernel evaluated at integer index pairs (i, j)
        kernel = lambda i,j: c*np.exp(-0.5*(X[i]-Y[j])@Binv@(X[i]-Y[j]));
        # kernel
        g = np.vectorize(kernel)
        # make kernel matrix
        K = np.fromfunction(g,(len(X),len(Y)),dtype=int);
        return K
    def mollifiedx2(self, X):
        """ Twice mollified kernel
        """
        # get optimized hyperparameters
        theta = self.get_hyperparameters()
        # A = (length_scale^2) I; B = A + 2*Sigma (double convolution)
        A = (theta[1]**2)*np.eye(self.dim);
        B = A + 2*self.Sigma
        Binv = np.linalg.inv(B); # inv(A + 2Sigma)
        # normalizing constant of the doubly-convolved Gaussian
        c = theta[0]*np.sqrt(np.linalg.det(A)/np.linalg.det(B))
        # kernel evaluated at integer index pairs (i, j)
        kernel = lambda i,j: c*np.exp(-0.5*(X[i]-X[j])@Binv@(X[i]-X[j]));
        g = np.vectorize(kernel)
        # make kernel matrix; add the white-noise variance on the diagonal
        K = np.fromfunction(g,(len(X),len(X)),dtype=int) + (theta[2])*np.eye(len(X))
        return K
| 677c42f2fa47376ef53619326e01ac9b2e11f0fb | [
"Python"
] | 15 | Python | mishapadidar/GP-Risk-Neutral | b5e84bdad4e99ee4b6734bd68667b9b62b9d3657 | cc5d8c5def250fb3754b8770599285c271cf90c7 |
refs/heads/master | <repo_name>ericsims/2013CTSC<file_sep>/show/track/components/socketServer.js
//Require HTTP module (to start server) and Socket.IO
var http = require('http'), io = require('socket.io');
var fs = require('fs');
//Serve socketServer.html over HTTP (server listens on port 5001 below)
var server = http.createServer(function(req, res){
  fs.readFile(__dirname + '/socketServer.html', function(err, data) {
    // Send HTML headers and message
    // NOTE(review): a readFile error is ignored here; on failure `data`
    // is undefined — consider handling `err`.
    res.writeHead(200,{ 'Content-Type': 'text/html' });
    res.end(data);
  });
});
server.listen(5001);
//Create a Socket.IO instance, passing it our server
var socket = io.listen(server);
//Add a connect listener
// NOTE(review): the callback parameter `socket` (the per-client socket)
// shadows the outer Socket.IO server variable of the same name.
socket.on('connection', function (socket) {
  socket.emit('news', { hello: 'world' });
  socket.on('my other event', function (data) {
    console.log(data);
  });
});<file_sep>/show/track/components/detection.js
var cv = require('opencv');
var PNG = require('png.js');
var draw = require('./draw');
var http = require('http');
var server = require('./mjpeg-stream');
var lower_threshold = [0, 0, 0];
var upper_threshold = [0, 0, 0];
var XYZ;
/**
 * Detect the configured target (settings['target' + index]) in a PNG frame.
 *
 * If adjustWhiteBalance is set, the thresholds are recomputed from the
 * frame's overall brightness before detection; otherwise they are taken
 * directly from the target's configured color +/- threshold.
 *
 * NOTE(review): the return value is the module-level XYZ, which is set
 * inside the cv.readImage callback. This only returns the *current*
 * frame's result if node-opencv invokes that callback synchronously —
 * confirm; otherwise this returns the previous frame's coordinates.
 *
 * Returns [x, y, distance], or [-1, -1, -1] when no target is configured.
 */
exports.readImage = function readImage(data, settings, index, adjustWhiteBalance){
  var target = settings['target'+index];
  if(target){
    if(adjustWhiteBalance) {
      var reader = new PNG(data);
      reader.parse(function(err, png){
        if (err) throw err;
        var whiteBalance = 0;
        // sets the module-level lower/upper_threshold as a side effect
        calculateWhiteBalance(png, whiteBalance, settings, target);
        cv.readImage(data, function(err, im){
          XYZ = exports.cvProcess(err, im, settings, target);
        });
      });
    } else {
      // static thresholds from the configured target color
      lower_threshold = [target.color[0] - target.threshold,
                          target.color[1] - target.threshold,
                          target.color[2] - target.threshold];
      upper_threshold = [target.color[0] + target.threshold,
                          target.color[1] + target.threshold,
                          target.color[2] + target.threshold];
      cv.readImage(data, function(err, im){
        XYZ = exports.cvProcess(err, im, settings, target);
      });
    }
    return XYZ;
  } else {
    // no target configured for this index: still stream the raw frame
    cv.readImage(data, function(err, im){
      XYZ = exports.cvProcess(err, im, settings, target);
    });
    return [-1, -1, -1];
  }
};
/**
 * Estimate the frame's average brightness and scale the target color's
 * detection thresholds accordingly.
 *
 * Samples every 10th pixel in each direction, averages the RGB mean of
 * each sampled pixel, and scales the configured target color by
 * (observed brightness / target.whiteBalance). Writes the result into
 * the module-level lower_threshold / upper_threshold arrays (the
 * whiteBalance parameter is a local accumulator only — JavaScript
 * numbers are passed by value, so the caller's variable is untouched).
 */
function calculateWhiteBalance(png, whiteBalance, settings, target){
  if(settings.debug){
    console.log('png.width: ' + png. width);
    console.log('png.height: ' + png. height);
  }
  // sample every 10th pixel in x and y
  var usageFrequency = 10;
  var totalPixels = (png.width * png.height) / (usageFrequency * usageFrequency);
  for (var x = 0; x <= png.width - usageFrequency; x += usageFrequency){
    for (var y = 0; y <= png.height - usageFrequency; y += usageFrequency){
      // per-pixel brightness: mean of the R, G, B channels
      var average = (png.getPixel(x,y)[0] + png.getPixel(x,y)[1] + png.getPixel(x,y)[2]) / 3;
      whiteBalance += average;
    }
  }
  whiteBalance = whiteBalance / totalPixels;
  // ratio of observed brightness to the calibrated reference brightness
  var whiteBalanceAdjust = whiteBalance / target.whiteBalance;
  if(settings.debug){
    console.log('whiteBalance: ' + whiteBalance);
    console.log('target.color: ' + target.color);
    console.log('settings.targetx.threshold: ' + target.threshold);
    console.log('whiteBalanceAdjust: ' + whiteBalanceAdjust);
  }
  // scale the target color, keep the +/- threshold band fixed
  lower_threshold = [(target.color[0] * whiteBalanceAdjust) - target.threshold,
                      (target.color[1] * whiteBalanceAdjust) - target.threshold,
                      (target.color[2] * whiteBalanceAdjust) - target.threshold];
  upper_threshold = [(target.color[0] * whiteBalanceAdjust) + target.threshold,
                      (target.color[1] * whiteBalanceAdjust) + target.threshold,
                      (target.color[2] * whiteBalanceAdjust) + target.threshold];
  if(settings.debug){
    console.log(lower_threshold);
    console.log(upper_threshold);
  }
};
/**
 * Run the OpenCV detection pipeline on one frame.
 *
 * Pipeline: color-threshold (module-level lower/upper_threshold) ->
 * Canny edges -> dilate -> find contours -> pick the largest contour
 * above target.minArea -> compute its center and an approximate
 * distance from the contour area. An annotated copy of the frame is
 * always pushed to the MJPEG stream via server.update().
 *
 * Returns [center_x, center_y, distance] when a target is found,
 * otherwise [-1, -1, -1].
 */
exports.cvProcess = function cvProcess(err, im_orig, settings, target) {
  // big: annotated full-color copy for streaming; im: working copy
  var big = im_orig.copy();
  var im = im_orig.copy();
  if(target){
    if(settings.opencv.saveFiles){
      im.save('./matrix.png');
      if(settings.debug){
        console.log('matrix.png saved');
      }
    }
    // threshold to the target color band (redundant inner target check)
    if(target)
      im.inRange(lower_threshold, upper_threshold);
    if(settings.opencv.saveFiles){
      im.save('./color.png');
      if(settings.debug){
        console.log('color.png saved');
      }
    }
    im.canny(settings.opencv.lowThresh, settings.opencv.highThresh);
    im.dilate(target.nIters);
    if(settings.opencv.saveFiles){
      im.save('./canny.png');
      if(settings.debug){
        console.log('canny.png saved');
      }
    }
    var contours = im.findContours();
    if(settings.debug){
      console.log('found contours: ' + contours.size());
      console.log('settings.targetx.minArea: ' + target.minArea);
    }
    var largest_blob = -1;
    if (contours.size() > 0) {
      // NOTE(review): `i` is assigned without var/let and leaks to the
      // global scope.
      for(i = 0; i < contours.size(); i++) {
        var area = contours.area(i);
        if(area > target.minArea){
          if(largest_blob != -1) {
            if(area > contours.area(largest_blob)) {
              largest_blob=i;
            }
          } else {
            largest_blob = i;
          }
        }
      }
      if(settings.debug){
        console.log('largest_blob: ' + largest_blob);
      }
      if(largest_blob != -1) {
        var current = contours.boundingRect(largest_blob);
        // reject blobs touching the frame border
        // NOTE(review): the left/top test compares against 1, not 0 —
        // confirm whether boundingRect coordinates start at 1 here.
        if(current.x == 1 || current.x == settings.opencv.width
          || current.y == 1 || current.y == settings.opencv.height){
          largest_blob = -1;
        }
      }
      if(largest_blob != -1) {
        var center = getCenter(current.x, current.y, current.width, current.height, settings);
        if(settings.debug){
          console.log('center: ' + center);
        }
        // distance estimate: calibration constant / sqrt(blob area)
        var distance = target.dissize / ( Math.sqrt(contours.area(largest_blob)) );
      }
    }
    if(settings.debug){
      if(largest_blob != -1) {
        console.log(center[0] + ', ' + center[1]);
      } else {
        console.log('no target found');
      }
    }
    // annotate the streamed frame
    if(contours.size() > 0){
      big.drawAllContours(contours, settings.WHITE);
    }
    if (largest_blob != -1){
      big.drawContour(contours, largest_blob, settings.BLUE);
      // NOTE(review): this passes a bounding rect + 3 args, but the
      // draw.js drawBoundingRect shown elsewhere takes
      // (im, contours, index, color) — confirm which draw module this
      // ./draw resolves to.
      draw.drawBoundingRect(big, current, settings.RED);
      draw.drawCenter(big, current, settings.RED, getCenter);
    }
    if(settings.opencv.saveFiles){
      big.save('./big.png');
      if(settings.debug){
        console.log('big.png saved');
      }
    }
    server.update(big.toBuffer());
    if(largest_blob != -1) {
      return [center[0], center[1], distance];
    } else {
      return [-1, -1, -1];
    }
  } else {
    // no target configured: stream the unannotated frame
    server.update(big.toBuffer());
    return [-1, -1, -1];
  }
};
function getCenter(x, y, width, height, settings) {
var center_x = x + width/2;
var center_y = y + height/2;
return [center_x, center_y];
};<file_sep>/show/balloonPop/components/draw.js
/**
* Draw a rectangle around the detected contour
*
* @param Matrix im Matrix image to draw lines on
* @param array contours Array of contours returned from canny.findContours
* @param int index Index in array to draw
* @param array Array of B,G,R int values of a color to draw with
*/
exports.drawBoundingRect = function(im, contours, index, color) {
var firstCorner = [contours.boundingRect(index).x, contours.boundingRect(index).y];
var secondCorner = [contours.boundingRect(index).x + contours.boundingRect(index).width, contours.boundingRect(index).y];
var thirdCorner = [contours.boundingRect(index).x, contours.boundingRect(index).y + contours.boundingRect(index).height];
var fourthCorner = [contours.boundingRect(index).x + contours.boundingRect(index).width, contours.boundingRect(index).y + contours.boundingRect(index).height]
im.line(firstCorner, secondCorner, color);
im.line(secondCorner, fourthCorner, color);
im.line(fourthCorner, thirdCorner, color);
im.line(thirdCorner, firstCorner, color);
}
/**
* Draw a ellipse on the center of a contour
*
* @param Matrix im Matrix image to draw lines on
* @param array contours Array of contours returned from canny.findContours
* @param int index Index in array to draw
* @param array Array of B,G,R int values of a color to draw with
*/
exports.drawCenter = function(im, contours, index, color, getCenter) {
var center = getCenter(
contours.boundingRect(index).x,
contours.boundingRect(index).y,
contours.boundingRect(index).width,
contours.boundingRect(index).height
);
im.ellipse(center[0], center[1], 3, 3, color);
}<file_sep>/show/balloonPop/server.js
var vapix = require('vapix');
var net = require('net');
var cv = require('opencv');
var http = require('http');
var fs = require('fs');
start();
function start(options) {
tcpStart();
}
function tcpStart() {
var server = net.createServer(function(socket) {
console.log('tcp: connected');
this.on('data', function(data) {
self.sendValue(data, socket);
});
socket.on('end', function() {
console.log('tcp: disconneced');
});
socket.on('error', function() {
console.log('tcp: Socket error occured');
});
});
var port = 8080;
server.listen(port, function() {
console.log('tcp: bound to port ' + port);
});
}
function sendValue(data, socket) {
var self = this;
if (0) {
} else {
socket.write('not found');
}
}
function sleep(milliSeconds) {
var startTime = new Date().getTime();
while (new Date().getTime() < startTime + milliSeconds);
}
exports.start = start;<file_sep>/ar-drone-Navdata.js
var arDrone = require('ar-drone');
var client = arDrone.createClient({ip: '192.168.1.10'});
client.config('control:altitude_max', 1500);
client.config('');
client.on('navdata', navdata);
var alt;
var yaw;
function navdata(data){
var datastr = JSON.stringify(data);
var split2 = (datastr.substring(datastr.indexOf('demo'), datastr.indexOf('detection'))).split(',');
for(var j=0; j<split2.length; j++){
console.log(split2[j]);
}
var split = datastr.split(',');
for(var i=0; i<split.length; i++){
var current = split[i];
if (current.indexOf('\"z\"') != -1 && current.indexOf('}}}}') != -1){
alt = -parse(current);
}
if (current.indexOf('yaw') != -1){
yaw = -parse(current);
}
}
if(go){
setAlt(1000);
//setYaw(0);
}
//process.exit(0);
}
function setAlt(setalt){
console.log('alt: ' + alt);
if(setalt > alt){
client.up(0.1);
} else {
client.down(0.1);
}
}
function setYaw(setyaw){
setyaw = Math.abs(setyaw - yaw);
if(yaw > setyaw){
client.clockwise(0.05);
console.log(yaw + "\tcw");
} else {
client.counterClockwise(0.05);
console.log(yaw + "\tccw");
}
}
function parse(current){
return parseFloat(current.substring(current.indexOf(':') + 1, current.length));
}
var go = false;
//client.takeoff();
client
.after(5000, function() {
go = true;
})
.after(15000, function() {
go = false;
this.stop();
this.land();
})
.after(5000, function() {
process.exit(0);
});
<file_sep>/onboard/serialRXLight/serialRXLight.ino
void setup() {
// initialize serial:
Serial.begin(9600);
pinMode(13, OUTPUT);
}
void loop() {
while (Serial.available() > 0) {
int value = Serial.parseInt();
Serial.write(value+'\n');
if (Serial.read() == '\n') {
digitalWrite(13, value);
}
}
}
<file_sep>/README.md
2013CTSC
========
2013 Connecticut Science Center AR.Drone Code for robotics theater show
Wiki and Instructions:
https://github.com/ericsims/2013CTSC/wiki
Recommended System:
* wifi capability
* minimum 512MB ram (2GB recommended)
* minimum 1GHz Processor
* minimum 100MB free space
* Ubuntu 14.04 or greater
<file_sep>/binding.gyp
{
"targets": [{
"target_name": "opencv"
, "sources": [
"node_modules/opencv/src/init.cc"
, "node_modules/opencv/src/Matrix.cc"
, "node_modules/opencv/src/OpenCV.cc"
, "node_modules/opencv/src/CascadeClassifierWrap.cc"
, "node_modules/opencv/src/Contours.cc"
, "node_modules/opencv/src/Point.cc"
, "node_modules/opencv/src/VideoCaptureWrap.cc"
, "node_modules/opencv/src/CamShift.cc"
, "node_modules/opencv/src/HighGUI.cc"
, "node_modules/opencv/src/FaceRecognizer.cc"
]
, "conditions": [
['OS=="win"', { #windows needs include dirs passed to MSBUILD this way
'include_dirs': [
'<!@(pkg-config --cflags "opencv >= 2.3.1" )'
],
}],
['OS=="mac"', {
# cflags on OS X are stupid and have to be defined like this
'xcode_settings': {
'OTHER_CFLAGS': [
'<!@(pkg-config --cflags opencv)'
]
, "GCC_ENABLE_CPP_RTTI": "YES"
, "GCC_ENABLE_CPP_EXCEPTIONS": "YES"
}
}]
]
, 'libraries': [
'<!@(pkg-config --libs opencv)'
]
, 'cflags': [
'<!@(pkg-config --cflags --libs "opencv >= 2.3.1" )'
,'-Wall'
]
, 'cflags!' : [ '-fno-exceptions']
, 'cflags_cc!': [ '-fno-rtti', '-fno-exceptions']
}]
}
<file_sep>/ardronekill.js
var net = require('net');
var client = net.connect(23, '192.168.1.10', function(){
client.on('data', function(data) {
console.log('data:', data.toString());
});
client.on('error', function(err) {
console.log('error:', err.message);
});
client.write('reboot\n');
client.write('exit\n');
process.exit();
});<file_sep>/show/balloonPop/components/detection.js
var cv = require('opencv');
var PNG = require('png.js');
var draw = require('./draw');
var lower_threshold = [0, 0, 0];
var upper_threshold = [0, 0, 0];
var XY;
exports.readImage = function readImage(data, settings){
var reader = new PNG(data);
reader.parse(function(err, png){
if (err) throw err;
var whiteBalance = 0;
calculateWhiteBalance(png, whiteBalance, settings);
cv.readImage(data, function(err, im){
XY = exports.cvProcess(err, im, settings);
});
});
return XY;
};
function calculateWhiteBalance(png, whiteBalance, settings){
if(settings.debug){
console.log('png.width: ' + png. width);
console.log('png.height: ' + png. height);
}
var totalPixels = png.width * png.height;
for (var x = 0; x < png.width; x++){
for (var y = 0; y < png.height; y++){
var average = (png.getPixel(x,y)[0] + png.getPixel(x,y)[1] + png.getPixel(x,y)[2]) / 3;
whiteBalance += average;
}
}
whiteBalance = whiteBalance / totalPixels;
var whiteBalanceAdjust = whiteBalance / settings.target1.whiteBalance;
if(settings.debug){
console.log('whiteBalance: ' + whiteBalance);
console.log('settings.target1.color: ' + settings.target1.color);
console.log('settings.opencv.threshold: ' + settings.opencv.threshold);
console.log('whiteBalanceAdjust: ' + whiteBalanceAdjust);
}
lower_threshold = [(settings.target1.color[0] * whiteBalanceAdjust) - settings.opencv.threshold,
(settings.target1.color[1] * whiteBalanceAdjust) - settings.opencv.threshold,
(settings.target1.color[2] * whiteBalanceAdjust) - settings.opencv.threshold];
upper_threshold = [(settings.target1.color[0] * whiteBalanceAdjust) + settings.opencv.threshold,
(settings.target1.color[1] * whiteBalanceAdjust) + settings.opencv.threshold,
(settings.target1.color[2] * whiteBalanceAdjust) + settings.opencv.threshold];
if(settings.debug){
console.log(lower_threshold);
console.log(upper_threshold);
}
};
exports.cvProcess = function cvProcess(err, im_orig, settings) {
var big = im_orig;
var im = im_orig;
im.convertGrayscale();
if(settings.opencv.saveFiles){
im.save('./matrix.png');
if(settings.debug){
console.log('matrix.png saved');
}
}
//im.inRange(lower_threshold, upper_threshold);
if(settings.opencv.saveFiles){
im.save('./color.png');
if(settings.debug){
console.log('color.png saved');
}
}
im.canny(settings.opencv.lowThresh, settings.opencv.highThresh);
im.dilate(settings.opencv.nIters);
if(settings.opencv.saveFiles){
im.save('./canny.png');
if(settings.debug){
console.log('canny.png saved');
}
}
var contours = im.findContours();
if(settings.debug){
console.log('found contours: ' + contours.size());
console.log('settings.opencv.minArea: ' + settings.opencv.minArea);
}
var largest_blob = -1;
if (contours.size() > 0){
for(i = 0; i < contours.size(); i++) {
var area = contours.area(i);
if(area > settings.opencv.minArea){
if(largest_blob != -1) {
if(area > contours.area(largest_blob)) {
largest_blob=i;
}
} else {
largest_blob = i;
}
}
}
if(largest_blob != -1) {
var current = contours.boundingRect(largest_blob);
if(current.x == 1 || current.x == settings.opencv.width
|| current.y == 1 || current.y == settings.opencv.height){
largest_blob = -1;
}
}
}
if(settings.debug){
if(largest_blob != -1) {
console.log(current.x + ', ' + current.y);
} else {
console.log('no target found');
}
}
if(settings.opencv.saveFiles){
if(largest_blob != -1) {
if (contours.size() > 0){
big.drawAllContours(contours, settings.WHITE);
draw.drawCenter(big, contours, largest_blob, settings.RED, getCenter);
}
big.save('./big.png');
if(settings.debug){
console.log('big.png saved');
}
big;
return [current.x, current.y];
} else {
return;
}
}
};
function getCenter(x, y, width, height) {
var center_x = x + width/2;
var center_y = y + height/2;
return [center_x, center_y];
};<file_sep>/show/show.js
require('js-yaml');
var fs = require('fs');
var ardrone = require('ar-drone');
var vapix = require('vapix');
var sys = require("sys");
var exec = require('child_process').exec;
var settings = require('./track/config/settings.yaml');
var detection = require('./track/components/detection');
var actions = require('./track/components/actions');
var stdin = process.openStdin();
var target = -1;
var follow = false;
var localize = false;
var streamSource = 'ardrone';
process.argv.forEach(function (val, index, array) {
if( index == 2 ){
target = val;
}
});
if(target < 0){
console.log('Please set a target number!');
}
if(settings.debug){
console.log('settings.ardrone.ip1: ' + settings.ardrone.ip1);
}
var client = ardrone.createClient({ip: settings.ardrone.ip1});
client.config('control:altitude_max', 1000);
var pngStream = client.getPngStream();
/*var camera = new vapix.Camera({
address: settings.camera.ip,
port: settings.camera.port,
username: settings.camera.username,
password: <PASSWORD>.camera.<PASSWORD>
});*/
var mjpg;
/*exec("ping -c 3 " + settings.camera.ip, function (error, stdout, stderr) {
if(stdout.indexOf("0% packet loss") != -1){
console.log('network camera found');
mjpg = camera.createVideoStream({
resolution: settings.camera.resolution,
compression: settings.camera.compression,
fps: settings.camera.fps
});
} else {
console.log('network camera NOT found!!!');
}
});*/
pngStream.on('data', function(data){
if(streamSource == 'ardrone'){
var XYZ = detection.readImage(data, settings, target, true);
if(XYZ && follow){
if(XYZ[0] != -1 && XYZ[1] != -1 && XYZ[2] != -1){
if(settings.debug){
console.log(XYZ);
}
actions.centerTarget(XYZ, settings, client);
} else {
console.log('stop');
client.stop();
}
}
}
});
if(mjpg){
mjpg.on('data', function(data) {
if(streamSource == 'vapix'){
var XYZ = detection.readImage(data, settings, target, false);
if(XYZ && follow){
if(XYZ[0] != -1 && XYZ[1] != -1 && XYZ[2] != -1){
if(settings.debug){
console.log(XYZ);
}
actions.centerTarget(XYZ, settings, client);
} else {
console.log('stop');
client.stop();
}
}
}
});
}
stdin.addListener("data", function(data) {
data = data.toString().substring(0, data.length-1);
console.log("you entered: [" + data + "]");
if(data == 'takeoff'){
takeoff();
} else if(data == 'land'){
land();
} else if(data == 'calibrate'){
actions.calibrate(client);
} else if(data == 'hover'){
actions.hover(client);
} else if (data.indexOf('set streamSource') != -1) {
streamSource = data.substring(17);
console.log('steamSource is now: ' + streamSource);
} else if (data.indexOf('set target') != -1) {
target = parseInt(data.substring(10));
console.log('target is now: ' + target);
} else if (data == 'start track') {
if (target > -1){
follow = true;
console.log('tracking started');
} else {
console.log('target not defined!');
}
} else if (data == 'stop track') {
follow = false;
console.log('tracking stopped');
} else if (data == 'exit') {
process.exit();
}
});
function takeoff(){
client.takeoff();
}
function land(){
client.stop();
client.land();
}
<file_sep>/onboard/testServer.js
var net = require('net');
var server = net.createServer(function(c) { //'connection' listener
console.log('server connected');
c.on('end', function() {
console.log('server disconnected');
});
c.on('data', function(data) {
if(data.toString().indexOf("heat") !== -1){
value = parseInt(data.toString().substring(4));
console.log(value);
c.write('on\n');
}
});
});
server.listen(8124, function() { //'listening' listener
console.log('server bound');
}); | a326f53b47db2d20cd09432a307040de664f15f9 | [
"JavaScript",
"Python",
"C++",
"Markdown"
] | 12 | JavaScript | ericsims/2013CTSC | bbd20cafd986d228a339a5d5182f45a67866d167 | 617ba79616200c3d063424f8c653b5f529fe4a94 |
refs/heads/main | <repo_name>oscarterah/yolocv<file_sep>/CMakeLists.txt
cmake_minimum_required(VERSION 3.12)
project(osc)
find_package(OpenCV REQUIRED)
include_directories(${OpenCV_INCLUDE_DIRS})
add_executable(osc main.cpp)
target_link_libraries(osc ${OpenCV_LIBS})
<file_sep>/main.cpp
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/imgproc.hpp>
#include <fstream>
#include <iostream>
#include <algorithm>
#include <cstdlib>
using namespace cv;
using namespace std;
using namespace dnn;
int main()
{
cout<<CV_VERSION;
VideoCapture cap;
cap.open(0);
std::string model = "yolov2.weights";
std::string config = "yolov2.cfg";
Net net = readNet(model, config,"Darknet");
net.setPreferableBackend(DNN_BACKEND_DEFAULT);
net.setPreferableTarget(DNN_TARGET_OPENCL);
vector<string> classNamesVec;
ifstream classNamesFile("coco.names");
if (classNamesFile.is_open())
{
string className = "";
while (std::getline(classNamesFile, className))
classNamesVec.push_back(className);
}
for (;;)
{
if (!cap.isOpened()) {
cout << "Video Capture Fail" << endl;
break;
}
Mat frame;
cap >> frame;
Mat inputBlob = blobFromImage(frame, 1 / 255.F, Size(416, 416), Scalar(), true, false);
net.setInput(inputBlob, "data");
Mat detectionMat;
net.forward(detectionMat);
vector<double> layersTimings;
double freq = getTickFrequency() / 1000;
double time = net.getPerfProfile(layersTimings) / freq;
ostringstream ss;
ss << "FPS: " << 1000 / time << " ; time: " << time << " ms";
putText(frame, ss.str(), Point(20, 20), 0, 0.5, Scalar(0, 0, 255));
std::cout << time << std::endl;
float confidenceThreshold = 0.1; // rows represent number of detected object (proposed region)
for (int i = 0; i < detectionMat.rows; i++)
{
const int probability_index = 5;
const int probability_size = detectionMat.cols - probability_index;
float *prob_array_ptr = &detectionMat.at<float>(i, probability_index);
size_t objectClass = max_element(prob_array_ptr, prob_array_ptr + probability_size) - prob_array_ptr;
float confidence = detectionMat.at<float>(i, (int)objectClass + probability_index);
if (confidence > confidenceThreshold)
{
float x = detectionMat.at<float>(i, 0);
float y = detectionMat.at<float>(i, 1);
float width = detectionMat.at<float>(i, 2);
float height = detectionMat.at<float>(i, 3);
int xLeftBottom = static_cast<int>((x - width / 2) * frame.cols);
int yLeftBottom = static_cast<int>((y - height / 2) * frame.rows);
int xRightTop = static_cast<int>((x + width / 2) * frame.cols);
int yRightTop = static_cast<int>((y + height / 2) * frame.rows);
Rect object(xLeftBottom, yLeftBottom,
xRightTop - xLeftBottom,
yRightTop - yLeftBottom);
rectangle(frame, object, Scalar(0, 255, 0));
if (objectClass < classNamesVec.size())
{
ss.str("");
ss << confidence;
String conf(ss.str());
String label = String(classNamesVec[objectClass]) + ": " + conf;
int baseLine = 0;
Size labelSize = getTextSize(label, FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
rectangle(frame, Rect(Point(xLeftBottom, yLeftBottom),
Size(labelSize.width, labelSize.height + baseLine)),
Scalar(255, 255, 255), FILLED);
putText(frame, label, Point(xLeftBottom, yLeftBottom + labelSize.height),
FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0, 0, 0));
}
else
{
cout << "Class: " << objectClass << endl;
cout << "Confidence: " << confidence << endl;
cout << " " << xLeftBottom
<< " " << yLeftBottom
<< " " << xRightTop
<< " " << yRightTop << endl;
}
}
}
imshow("YOLO: Detections", frame);
if (waitKey(1) >= 0) break;
}
return 0;
}
| 55432392bac85994050487f0ce0ea9d3e2f1c335 | [
"CMake",
"C++"
] | 2 | CMake | oscarterah/yolocv | e0ce477bf3fdc13d3a011fc49ff2e5e2cd817baa | 8e076db812bb6541943a9a4c3a5c5ff1a81f3c90 |
refs/heads/master | <repo_name>sushiljain1989/yeoman-ui<file_sep>/backend/src/extension.ts
import * as fsextra from 'fs-extra';
import * as _ from 'lodash';
import * as path from 'path';
import * as vscode from 'vscode';
import { YeomanUI } from "./yeomanui";
import {RpcExtension} from '@sap-devx/webview-rpc/out.ext/rpc-extension';
import { YouiLog } from "./youi-log";
import { OutputChannelLog } from './output-channel-log';
import { YouiEvents } from "./youi-events";
import { VSCodeYouiEvents } from './vscode-youi-events';
import { GeneratorFilter } from './filter';
import backendMessages from "./messages";
import { getClassLogger, createExtensionLoggerAndSubscribeToLogSettingsChanges } from "./logger/logger-wrapper";
import { IChildLogger } from "@vscode-logging/logger";
const ERROR_ACTIVATION_FAILED_LOGGER_CONFIG = 'Extension activation failed due to Logger configuration failure:';
export function activate(context: vscode.ExtensionContext) {
try {
createExtensionLoggerAndSubscribeToLogSettingsChanges(context);
} catch (error) {
console.error(ERROR_ACTIVATION_FAILED_LOGGER_CONFIG, error.message);
return;
}
context.subscriptions.push(
vscode.commands.registerCommand('loadYeomanUI', (options?: any) => {
const filter = GeneratorFilter.create(_.get(options, "filter"));
const messages = _.get(options, "messages");
const displayedPanel = _.get(YeomanUIPanel, "currentPanel.panel");
if (displayedPanel) {
displayedPanel.dispose();
}
YeomanUIPanel.create(context.extensionPath, filter, messages);
}));
context.subscriptions.push(
vscode.commands.registerCommand('yeomanUI.toggleOutput', () => {
const yeomanUi = _.get(YeomanUIPanel, "currentPanel.yeomanui");
if (yeomanUi) {
yeomanUi.toggleOutput();
}
}));
if (vscode.window.registerWebviewPanelSerializer) {
vscode.window.registerWebviewPanelSerializer(YeomanUIPanel.viewType, {
async deserializeWebviewPanel(webviewPanel: vscode.WebviewPanel, state: any) {
console.log(`Got state: ${state}`);
YeomanUIPanel.genFilter = GeneratorFilter.create(_.get(state, "filter"));
YeomanUIPanel.messages = _.assign({}, backendMessages, _.get(state, "messages", {}));
YeomanUIPanel.revive(webviewPanel, context.extensionPath);
}
});
}
}
/**
* Manages webview panels
*/
export class YeomanUIPanel {
/**
* Track the currently panel. Only allow a single panel to exist at a time.
*/
public static readonly viewType = 'yeomanui';
public static currentPanel: YeomanUIPanel | undefined;
public static genFilter: GeneratorFilter;
public static messages: any;
public static create(extensionPath: string, filter?: GeneratorFilter, messages: any = {}) {
YeomanUIPanel.genFilter = GeneratorFilter.create(filter);
YeomanUIPanel.messages = _.assign({}, backendMessages, messages);
// Otherwise, create a new panel.
const panel = vscode.window.createWebviewPanel(
YeomanUIPanel.viewType,
'Yeoman UI',
vscode.ViewColumn.One,
{
// Enable javascript in the webview
enableScripts: true,
retainContextWhenHidden : true,
// And restrict the webview to only loading content from our extension's `media` directory.
localResourceRoots: [vscode.Uri.file(YeomanUIPanel.getMediaPath(extensionPath))]
}
);
YeomanUIPanel.currentPanel = new YeomanUIPanel(panel, extensionPath);
}
public static revive(panel: vscode.WebviewPanel, extensionPath: string) {
YeomanUIPanel.currentPanel = new YeomanUIPanel(panel, extensionPath);
}
private static getMediaPath(extensionPath: string): string {
return path.join(extensionPath, 'dist', 'media');
}
public async showOpenFileDialog(currentPath: string): Promise<string> {
return await this.showOpenDialog(currentPath, true);
}
public async showOpenFolderDialog(currentPath: string): Promise<string> {
return await this.showOpenDialog(currentPath, false);
}
private async showOpenDialog(currentPath: string, canSelectFiles: boolean): Promise<string> {
const canSelectFolders: boolean = !canSelectFiles;
let uri;
try {
uri = vscode.Uri.file(currentPath);
} catch (e) {
uri = vscode.Uri.file('/');
}
try {
const filePath = await vscode.window.showOpenDialog({
canSelectFiles,
canSelectFolders,
defaultUri: uri
});
return _.get(filePath, "[0].fsPath");
} catch (error) {
return currentPath;
}
}
public yeomanui: YeomanUI;
private readonly logger: IChildLogger = getClassLogger(YeomanUI.name);
private rpc: RpcExtension;
private readonly extensionPath: string;
private disposables: vscode.Disposable[] = [];
private constructor(public readonly panel: vscode.WebviewPanel, extensionPath: string) {
this.extensionPath = extensionPath;
this.rpc = new RpcExtension(this.panel.webview);
const outputChannel: YouiLog = new OutputChannelLog();
const vscodeYouiEvents: YouiEvents = new VSCodeYouiEvents(this.rpc, this.panel, YeomanUIPanel.genFilter);
this.yeomanui = new YeomanUI(this.rpc, vscodeYouiEvents, outputChannel, this.logger, YeomanUIPanel.genFilter);
this.yeomanui.registerCustomQuestionEventHandler("file-browser", "getFilePath", this.showOpenFileDialog.bind(this));
this.yeomanui.registerCustomQuestionEventHandler("folder-browser", "getPath", this.showOpenFolderDialog.bind(this));
// Set the webview's initial html content
this._update();
this.panel.onDidDispose(() => this.dispose(), null, this.disposables);
}
public dispose() {
YeomanUIPanel.currentPanel = undefined;
// Clean up our resources
this.panel.dispose();
while (this.disposables.length) {
const x = this.disposables.pop();
if (x) {
x.dispose();
}
}
}
private setState(options: any): Promise<void> {
return this.rpc ? this.rpc.invoke("setState", [options]) : Promise.resolve();
}
private async _update() {
let indexHtml: string = await fsextra.readFile(path.join(YeomanUIPanel.getMediaPath(this.extensionPath), 'index.html'), "utf8");
if (indexHtml) {
// Local path to main script run in the webview
const scriptPathOnDisk = vscode.Uri.file(path.join(YeomanUIPanel.getMediaPath(this.extensionPath), path.sep));
const scriptUri = this.panel.webview.asWebviewUri(scriptPathOnDisk);
// TODO: very fragile: assuming double quotes and src is first attribute
// specifically, doesn't work when building vue for development (vue-cli-service build --mode development)
indexHtml = indexHtml.replace(/<link href=/g, `<link href=${scriptUri.toString()}`);
indexHtml = indexHtml.replace(/<script src=/g, `<script src=${scriptUri.toString()}`);
indexHtml = indexHtml.replace(/<img src=/g, `<img src=${scriptUri.toString()}`);
}
const messages = YeomanUIPanel.messages;
const filter = YeomanUIPanel.genFilter;
this.panel.title = _.get(messages, "panel_title");
this.panel.webview.html = indexHtml;
await this.setState({messages, filter});
}
}
let channel: vscode.OutputChannel;
export function getOutputChannel(): vscode.OutputChannel {
if (!channel) {
channel = vscode.window.createOutputChannel('Yeoman UI');
}
return channel;
}
| 4517daa0208a01c74302b467a1d57f9d11cffb92 | [
"TypeScript"
] | 1 | TypeScript | sushiljain1989/yeoman-ui | a9e2438dac9e9dcd3002c620fcd2586d0bf61899 | b287b029f09bd14956b76547fca8d2d19747de2f |
refs/heads/master | <file_sep>OBS_PROJECT := EA4
OBS_PACKAGE := ea-tomcat85
DISABLE_BUILD += repository=CentOS_8 repository=CentOS_9
include $(EATOOLS_BUILD_DIR)obs.mk
<file_sep>. $HOME/ea-tomcat85/bin/setenv.sh
$CATALINA_HOME/bin/shutdown.sh
<file_sep>. /opt/cpanel/ea-tomcat85/bin/user-functions
. $HOME/ea-tomcat85/bin/setenv.sh
tomcat_pid() {
if [ -n "$CATALINA_PID" ] && [ -e $CATALINA_PID ]; then
cat $CATALINA_PID
fi
}
ERROR=0
case $1 in
start)
pid=$(tomcat_pid)
if [ -n "$pid" ] && ps --pid $pid 2>&1 1>/dev/null; then
echo -e "\e[00;33mTomcat is already running (pid: $pid)\e[00m"
ERROR=1
else
/opt/cpanel/ea-tomcat85/bin/user-startup.sh
fi
;;
stop)
pid=$(tomcat_pid)
if [ ! -n "$pid" ] || [ ! ps --pid $pid 2>&1 1>/dev/null ]; then
echo -e "\e[00;31mTomcat is already shutdown\e[00m"
ERROR=1
else
/opt/cpanel/ea-tomcat85/bin/user-shutdown.sh
fi
;;
restart|force-reload|reload)
pid=$(tomcat_pid)
if [ -n "$pid" ] && ps --pid $pid 2>&1 1>/dev/null; then
/opt/cpanel/ea-tomcat85/bin/user-shutdown.sh
fi
/opt/cpanel/ea-tomcat85/bin/user-startup.sh
;;
status|fullstatus)
pid=$(tomcat_pid)
if [ -f "$CATALINA_PID" ]; then
if ps --pid $pid 2>&1 1>/dev/null; then
echo -e "\e[00;32mTomcat is running!\e[00m"
ERROR=0
else
echo "$CATALINA_PID found, but $pid is not running"
ERROR=4
fi
else
echo -e "\e[00;31mTomcat is currently not running.\e[00m"
ERROR=3
fi
;;
*)
echo $"Usage: $0 {start|stop|restart|status|fullstatus}"
ERROR=2
esac
exit $ERROR
<file_sep>#!/bin/sh
# cpanel - ea-tomcat85 Copyright 2018 cPanel, Inc.
# All rights Reserved.
# <EMAIL> http://cpanel.net
# This code is subject to the cPanel license. Unauthorized copying is prohibited
#
# Checkconfig Stanzas:
# -----------------------------------------------------------------------------------
# chkconfig: 2345 98 15
# description: Apache Tomcat 8.5 is an open source web server and servlet container.
# processname: /usr/bin/java
# source function library
. /etc/rc.d/init.d/functions
export CATALINA_HOME=/opt/cpanel/ea-tomcat85
export CATALINA_BASE=/opt/cpanel/ea-tomcat85
. /opt/cpanel/ea-tomcat85/bin/setenv.sh
tomcat_pid() {
if [ -n "$CATALINA_PID" ] && [ -e $CATALINA_PID ]; then
cat $CATALINA_PID
fi
}
case $1 in
start)
pid=$(tomcat_pid)
file=/opt/cpanel/ea-tomcat85/bin/startup.sh
# no way to start tomcat
if [ ! -x $file ]; then
echo -e "\e[00m;31mMissing $file\e[00m"
ERROR=1
else
# tomcat disabled by whm
if [ -e /etc/tomcatdisable ]; then
echo -e "\e[00;31mTomcat is disabled by cPanel/WHM\e[00m"
ERROR=0
else
ERROR=0
if [ -n "$pid" ]; then
echo -e "\e[00;33mTomcat is already running (pid: $pid)\e[00m"
else
# start tomcat, not running
su -s $file tomcat &>/dev/null
echo -e "\e[00;33mGiving Tomcat time to start up ......\e[00m"
sleep 1 # just to make sure
pid=$(tomcat_pid)
if [ -n "$pid" ]; then
ERROR=0
echo -e "\e[00;32mTomcat has started!\e[00m"
else
echo -e "\e[00;31mFailed to start Tomcat\e[00m"
ERROR=1
fi
fi
fi
fi
;;
stop)
pid=$(tomcat_pid)
file=/opt/cpanel/ea-tomcat85/bin/shutdown.sh
if [ ! -n "$pid" ]; then
echo -e "\e[00;31mTomcat is already shutdown\e[00m"
ERROR=1
else
if [ ! -x $file ]; then
echo -e "\e[00;31mMissing $file\e[00m"
ERROR=1
else
su -s $file tomcat
echo -e "\e[00;32mTomcat has shutdown.\e[00m"
ERROR=0
fi
fi
;;
restart)
file=/usr/local/cpanel/scripts/restartsrv_ea_tomcat85
if [ ! -x $file ]; then
echo "Missing $file"
ERROR=1
else
$file # must be run as root, it is ok though because it comes back in here for start/stop
fi
;;
status|fullstatus)
pid=$(tomcat_pid)
if [ -n "$pid" ]; then
echo -e "\e[00;32mTomcat is running!\e[00m"
else
echo -e "\e[00;31mTomcat is currently not running.\e[00m"
fi
;;
*)
echo $"Usage: $0 {start|stop|restart|status|fullstatus}"
ERROR=2
esac
exit $ERROR
<file_sep>export CATALINA_BASE=$HOME/ea-tomcat85
export CATALINA_HOME=/opt/cpanel/ea-tomcat85
export CATALINA_PID=$HOME/ea-tomcat85/run/catalina.pid
<file_sep>export CATALINA_OPTS="$CATALINA_OPTS -server -Dfile.encoding=UTF-8 -Xms128m -Xmx6248m"
export CATALINA_PID="/var/run/ea-tomcat85/catalina.pid"
<file_sep>. $HOME/ea-tomcat85/bin/setenv.sh
$CATALINA_HOME/bin/startup.sh
| 493420ce5ed33961d8c1bcf7d181faf40cf15f50 | [
"Makefile",
"Shell"
] | 7 | Makefile | CpanelInc/ea-tomcat85 | f9a3ced1bf50a47ce151a22f68487565beef8464 | 49dcc2976dc7403490803257f7d830e59930816b |
refs/heads/dev | <file_sep>//! Store activities (current, finished) as Json files.
use crate::rtw_core::activity::{Activity, OngoingActivity};
use crate::rtw_core::storage::Storage;
use crate::rtw_core::ActivityId;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::fs::{File, OpenOptions};
use std::path::{Path, PathBuf};
use thiserror::Error;
/// The list of finished activities as persisted in the finished file.
type Activities = Vec<Activity>;
/// A finished activity paired with its id (assigned in `get_sorted_activities`).
type ActivityWithId = (ActivityId, Activity);
/// An ongoing activity paired with its id (assigned in `get_ongoing_activities`).
type OngoingActivityWithId = (ActivityId, OngoingActivity);
/// On-disk wrapper for the finished activities file.
///
/// `semver` records which crate version last wrote the file; it is `None`
/// when the file was read in the legacy format (a bare Json array of
/// activities, without this wrapper).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct FinishedActivities {
    /// Crate version that produced the file; absent in legacy files.
    #[serde(default)]
    pub semver: Option<String>,
    /// The finished activities themselves.
    pub activities: Activities,
}
impl Default for FinishedActivities {
fn default() -> Self {
FinishedActivities {
semver: Some(crate_version!().to_string()),
activities: vec![],
}
}
}
/// Errors that the Json storage backend can produce.
#[derive(Error, Debug)]
pub enum JsonStorageError {
    /// Reading or writing one of the backing files failed.
    #[error("storage io error")]
    IOError(#[from] std::io::Error),
    /// Parsing or serializing the Json content failed.
    #[error("(de)serialization failed")]
    SerdeJsonError(#[from] serde_json::error::Error),
}
/// `Storage` implementation backed by two Json files.
pub struct JsonStorage {
    /// Path of the file holding ongoing (not yet finished) activities.
    current_path: PathBuf,
    /// Path of the file holding finished activities.
    finished_path: PathBuf,
}
/// On-disk wrapper for the ongoing activities file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OngoingActivities {
    /// The currently ongoing activities, in no particular order on disk.
    ongoing: Vec<OngoingActivity>,
}
impl JsonStorage {
    /// Create a Json storage backed by two files: `current_path` for ongoing
    /// activities and `finished_path` for finished ones.
    pub fn new(current_path: PathBuf, finished_path: PathBuf) -> Self {
        JsonStorage {
            current_path,
            finished_path,
        }
    }

    /// Load the finished activities from `finished_path`.
    ///
    /// The current format (`FinishedActivities`, with a `semver` field) is
    /// tried first; on parse failure the legacy format (a bare Json array of
    /// activities) is tried and tagged with `semver: None`. A missing file
    /// yields the empty default set.
    fn get_finished_activities(&self) -> Result<FinishedActivities, JsonStorageError> {
        if !Path::exists(&self.finished_path) {
            return Ok(FinishedActivities::default());
        }
        // Read the file once and parse from the string: the original opened
        // the file a second time for the legacy fallback, because
        // `from_reader` consumes the reader.
        let contents = std::fs::read_to_string(&self.finished_path)?;
        match serde_json::from_str::<FinishedActivities>(&contents) {
            Ok(finished_activities) => Ok(finished_activities),
            Err(_) => {
                // Fall back to the legacy on-disk format.
                let activities: Activities = serde_json::from_str(&contents)?;
                Ok(FinishedActivities {
                    semver: None,
                    activities,
                })
            }
        }
    }

    /// All finished activities, sorted by `Activity`'s ordering and paired
    /// with ids. Ids count down so that the last activity in sort order
    /// gets id 0.
    fn get_sorted_activities(&self) -> Result<Vec<(ActivityId, Activity)>, JsonStorageError> {
        let mut finished_activities = self.get_finished_activities()?;
        finished_activities.activities.sort();
        Ok((0..finished_activities.activities.len())
            .rev()
            .zip(finished_activities.activities)
            .collect())
    }
}
impl Storage for JsonStorage {
type StorageError = JsonStorageError;
fn write_activity(&mut self, activity: Activity) -> Result<(), Self::StorageError> {
if !Path::exists(&self.finished_path) {
let file = File::create(&self.finished_path)?;
let activities: Activities = vec![activity];
let finished_activities = FinishedActivities {
semver: Some(crate_version!().to_string()),
activities,
};
serde_json::to_writer(file, &finished_activities)?;
Ok(())
} else {
let mut finished_activities = self.get_finished_activities()?;
finished_activities.activities.push(activity);
let file = OpenOptions::new()
.write(true)
.truncate(true)
.open(&self.finished_path)?;
finished_activities.semver = Some(crate_version!().to_string());
serde_json::to_writer(file, &finished_activities)?;
Ok(())
}
}
fn filter_activities<P>(&self, p: P) -> Result<Vec<ActivityWithId>, Self::StorageError>
where
P: Fn(&(ActivityId, Activity)) -> bool,
{
let indexed_finished_activities = self.get_sorted_activities()?;
let filtered = indexed_finished_activities.into_iter().filter(p);
Ok(filtered.collect())
}
fn get_finished_activities(&self) -> Result<Vec<ActivityWithId>, Self::StorageError> {
self.get_sorted_activities()
}
fn delete_activity(&self, id: usize) -> Result<Option<Activity>, Self::StorageError> {
let finished_activities = self.get_sorted_activities()?;
let (removed, kept): (Vec<&ActivityWithId>, Vec<&ActivityWithId>) = finished_activities
.iter()
.partition(|(finished_id, _)| *finished_id == id);
let kept: Vec<&Activity> = kept.iter().map(|(_, a)| a).collect();
let file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(&self.finished_path)?;
serde_json::to_writer(file, &kept)?;
Ok(match removed.as_slice() {
[(_, removed)] => Some(removed.clone()),
_ => None,
})
}
fn get_ongoing_activities(&self) -> Result<Vec<OngoingActivityWithId>, Self::StorageError> {
if !Path::exists(&self.current_path) {
Ok(vec![])
} else {
let file = File::open(&self.current_path)?;
let ongoing_activities: OngoingActivities = serde_json::from_reader(file)?;
Ok(ongoing_activities
.ongoing
.iter()
.cloned()
.sorted()
.enumerate()
.collect())
}
}
fn get_ongoing_activity(
&self,
id: ActivityId,
) -> Result<Option<OngoingActivity>, Self::StorageError> {
let ongoing_activities = self.get_ongoing_activities()?;
let ongoing = ongoing_activities
.iter()
.find(|(a_id, _a)| *a_id == id)
.map(|(_a_id, a)| a);
Ok(ongoing.cloned())
}
fn add_ongoing_activity(
&mut self,
activity: OngoingActivity,
) -> Result<(), Self::StorageError> {
let ongoing_activities = self.get_ongoing_activities()?;
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&self.current_path)?;
serde_json::to_writer(
file,
&OngoingActivities {
ongoing: ongoing_activities
.iter()
.map(|(_a_id, a)| a)
.sorted()
.cloned()
.chain(std::iter::once(activity))
.collect(),
},
)?;
Ok(())
}
fn remove_ongoing_activity(
&mut self,
id: ActivityId,
) -> Result<Option<OngoingActivity>, Self::StorageError> {
let ongoing_activities = self.get_ongoing_activities()?;
let (removed, kept): (Vec<OngoingActivityWithId>, Vec<OngoingActivityWithId>) =
ongoing_activities
.iter()
.cloned()
.partition(|(a_id, _a)| *a_id == id);
let file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&self.current_path)?;
let kept_without_id: Vec<OngoingActivity> =
kept.iter().cloned().sorted().map(|(_a_id, a)| a).collect();
serde_json::to_writer(
file,
&OngoingActivities {
ongoing: kept_without_id,
},
)?;
Ok(removed.first().cloned().map(|(_a_id, a)| a))
}
}
<file_sep>//! Newtype on `chrono::Duration`
use chrono::Duration;
use std::fmt;
use std::fmt::{Error, Formatter};
use std::iter::Sum;
use std::ops::Add;
/// Newtype on `chrono::Duration`
///
/// Lets this crate implement `Display`, `Default`, `Add` and `Sum`
/// for durations (see the impls below).
pub struct DurationW(chrono::Duration);
impl fmt::Display for DurationW {
    /// Render as `HH:MM:SS` (hours are not wrapped at 24).
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        let total_seconds = self.0.num_seconds();
        let hours = total_seconds / 3600;
        let minutes = (total_seconds / 60) % 60;
        let seconds = total_seconds % 60;
        write!(f, "{:02}:{:02}:{:02}", hours, minutes, seconds)
    }
}
impl DurationW {
pub fn new(d: Duration) -> Self {
DurationW(d)
}
}
impl Default for DurationW {
    /// The zero duration.
    fn default() -> Self {
        Self::new(Duration::seconds(0))
    }
}
impl From<Duration> for DurationW {
fn from(d: Duration) -> Self {
DurationW(d)
}
}
/// Unwrap into the underlying `chrono::Duration`.
///
/// Implemented as `From<DurationW> for Duration` rather than `Into`
/// (the idiomatic direction, per clippy's `from_over_into`); the std
/// blanket impl still provides `Into<Duration>`, so existing `.into()`
/// call sites keep working.
impl From<DurationW> for Duration {
    fn from(d: DurationW) -> Self {
        d.0
    }
}
impl Add<DurationW> for DurationW {
type Output = DurationW;
fn add(self, rhs: DurationW) -> Self::Output {
DurationW::new(self.0 + rhs.0)
}
}
impl Sum for DurationW {
fn sum<I: Iterator<Item = DurationW>>(iter: I) -> Self {
iter.fold(DurationW::default(), Add::add)
}
}
<file_sep>//! # RTW
//!
//! Command-line interface (CLI) time tracker.
//!
//! CLI usage is stable, underlying API is **not stable**.
//!
//! This project is heavily inspired by [Timewarrior](https://github.com/GothenburgBitFactory/timewarrior).
//!
//! For a stable feature-rich CLI time tracker, please use Timewarrior: <https://timewarrior.net/>.
//!
//! ## Design
//!
//! * Activities are stored inside a `Storage`.
//! * An `ActivityService` provides the logic above a storage.
//! * `rtw_cli::run` translates CLI args to actions (`RTWAction`).
//! * `rtw_cli::run_action` performs actions `RTWAction` by calling the service.
<file_sep>#!/bin/bash -v
# Build a release bundle: check formatting, run tests, build a static
# musl binary inside Docker, then collect docs and the binary into a
# timestamped release directory.
shopt -s globstar  # make src/**/*.rs recurse into subdirectories
RELEASE_DIR="release$(date +%F-%H-%M-%S)"
rustfmt --check src/**/*.rs &&
cargo test &&
docker run --rm -it -v "$(pwd)":/home/rust/src ekidd/rust-musl-builder cargo build --release &&
mkdir -p "${RELEASE_DIR}" &&
cp -r ./CHANGELOG.md ./commands.md ./shell-completion.md \
example/ img/ ./README.md ./LICENSE "${RELEASE_DIR}" &&
cp target/x86_64-unknown-linux-musl/release/rtw "${RELEASE_DIR}/rtw-x86_64-unknown-linux-musl"
<file_sep>//! Translate CLI args to calls to activity Service.
use crate::cli_helper;
use crate::ical_export::export_activities_to_ical;
use crate::rtw_cli::OptionalOrAmbiguousOrNotFound::Optional;
use crate::rtw_config::RTWConfig;
use crate::rtw_core::activity::{Activity, OngoingActivity};
use crate::rtw_core::clock::Clock;
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::durationw::DurationW;
use crate::rtw_core::service::ActivityService;
use crate::rtw_core::storage::Storage;
use crate::rtw_core::ActivityId;
use crate::rtw_core::{Description, Tags};
use crate::service::Service;
use crate::timeline::render_days;
use clap::ArgMatches;
use itertools::Itertools;
type ActivityWithId = (ActivityId, Activity);
/// Describe the action to be made
///
/// see `run`
pub enum RTWAction {
    /// Cancel an ongoing activity (the sole ongoing one when id is `None`).
    Cancel(Option<ActivityId>),
    /// Start an activity at a time, with tags and an optional description.
    Start(DateTimeW, Tags, Option<Description>),
    /// Record an already-finished activity over the (start, stop) range.
    Track((DateTimeW, DateTimeW), Tags, Option<Description>),
    /// Stop an ongoing activity (the sole ongoing one when id is `None`).
    Stop(DateTimeW, Option<ActivityId>),
    /// Display finished activities in a (start, stop) range.
    /// Flags: display id, display description, report (merge same tags).
    Summary((DateTimeW, DateTimeW), bool, bool, bool),
    /// Print finished activities of a (start, stop) range as iCalendar.
    DumpICal((DateTimeW, DateTimeW)),
    /// Restart a finished activity (the last one when id is `None`).
    Continue(Option<ActivityId>),
    /// Delete the finished activity with this id.
    Delete(ActivityId),
    /// Display the currently ongoing activities.
    DisplayCurrent,
    /// Render a timeline of activities in a (start, stop) range.
    Timeline((DateTimeW, DateTimeW)),
    /// Generate a shell completion script on stdout.
    Completion(clap::Shell),
}
/// Mutation to be applied by `run_mutation` (the side-effecting step).
pub enum RTWMutation {
    /// Persist a new ongoing activity.
    Start(OngoingActivity),
    /// Persist an already-finished activity.
    Track(Activity),
    /// Stop the ongoing activity with this id at the given time.
    Stop(DateTimeW, ActivityId),
    /// Delete the finished activity with this id.
    Delete(ActivityId),
    /// Remove the ongoing activity with this id.
    Cancel(ActivityId),
    /// Nothing to persist (display-only action).
    Pure,
}
/// Result of looking up an ongoing activity by optional id.
enum OptionalOrAmbiguousOrNotFound {
    /// A single match, or `None` when no activity is ongoing.
    Optional(Option<(ActivityId, OngoingActivity)>),
    /// No id was given while several activities are ongoing.
    Ambiguous,
    /// An id was given but no ongoing activity carries it.
    NotFound(ActivityId),
}
/// Merge activities sharing the same title into one row per title.
///
/// Each row carries the id of the title's first occurrence, a
/// representative activity, the summed duration and the segment count.
/// Row order follows the first occurrence of each title.
fn merge_same_tags(activities: &[ActivityWithId]) -> Vec<(ActivityId, Activity, DurationW, usize)> {
    activities
        .iter()
        .cloned()
        .unique_by(|(_id, activity)| activity.get_title())
        .map(|(id, representative)| {
            // durations of every activity sharing this title
            let durations: Vec<DurationW> = activities
                .iter()
                .filter(|(_id, other)| other.get_title() == representative.get_title())
                .map(|(_id, a)| a.get_duration())
                .collect();
            let segments = durations.len();
            let total: DurationW = durations.into_iter().sum();
            (id, representative, total, segments)
        })
        .collect()
}
/// Resolve an optional id to an ongoing activity.
///
/// With an explicit id: return the matching activity or `NotFound`.
/// Without one: return the sole ongoing activity, `None` when nothing
/// is ongoing, or `Ambiguous` when several activities are ongoing.
fn get_ongoing_activity<S: Storage>(
    id_maybe: Option<ActivityId>,
    service: &Service<S>,
) -> anyhow::Result<OptionalOrAmbiguousOrNotFound> {
    if let Some(wanted_id) = id_maybe {
        let found = service.get_ongoing_activity(wanted_id)?;
        return Ok(match found {
            Some(activity) => {
                OptionalOrAmbiguousOrNotFound::Optional(Some((wanted_id, activity)))
            }
            None => OptionalOrAmbiguousOrNotFound::NotFound(wanted_id),
        });
    }
    let ongoing = service.get_ongoing_activities()?;
    Ok(match ongoing.as_slice() {
        [] => OptionalOrAmbiguousOrNotFound::Optional(None),
        [(only_id, only)] => {
            OptionalOrAmbiguousOrNotFound::Optional(Some((*only_id, only.clone())))
        }
        _ => OptionalOrAmbiguousOrNotFound::Ambiguous,
    })
}
/// Translate CLI args to actions (side-effect free)
///
/// It may fetch data from underlying activity storage but it should not write anything.
pub fn run<Cl>(matches: &ArgMatches, clock: &Cl) -> anyhow::Result<RTWAction>
where
    Cl: Clock,
{
    match matches.subcommand() {
        ("start", Some(sub_m)) => {
            let (start_time, tags, description) = cli_helper::parse_start_args(sub_m, clock)?;
            // resolve the parsed (possibly relative) time against the clock
            let abs_start_time = clock.date_time(start_time);
            Ok(RTWAction::Start(abs_start_time, tags, description))
        }
        ("stop", Some(sub_m)) => {
            let (stop_time, stopped_id_maybe) = cli_helper::parse_stop_args(sub_m, clock)?;
            let abs_stop_time = clock.date_time(stop_time);
            Ok(RTWAction::Stop(abs_stop_time, stopped_id_maybe))
        }
        ("summary", Some(sub_m)) => {
            let ((range_start, range_end), display_id, display_description, report) =
                cli_helper::parse_summary_args(sub_m, clock)?;
            Ok(RTWAction::Summary(
                (range_start, range_end),
                display_id,
                display_description,
                report,
            ))
        }
        ("timeline", Some(sub_m)) => {
            let ((range_start, range_end), _display_id) =
                cli_helper::parse_timeline_args(sub_m, clock)?;
            Ok(RTWAction::Timeline((range_start, range_end)))
        }
        ("continue", Some(sub_m)) => {
            let continue_id_maybe = cli_helper::parse_continue_args(sub_m)?;
            Ok(RTWAction::Continue(continue_id_maybe))
        }
        ("delete", Some(sub_m)) => {
            let id = cli_helper::parse_delete_args(sub_m)?;
            Ok(RTWAction::Delete(id))
        }
        ("track", Some(sub_m)) => {
            let (start_time, stop_time, tags, description) =
                cli_helper::parse_track_args(sub_m, clock)?;
            let start_time = clock.date_time(start_time);
            let stop_time = clock.date_time(stop_time);
            Ok(RTWAction::Track((start_time, stop_time), tags, description))
        }
        // `day` and `week` are shortcuts for a timeline over those ranges
        ("day", Some(_sub_m)) => {
            let (range_start, range_end) = clock.today_range();
            Ok(RTWAction::Timeline((range_start, range_end)))
        }
        ("week", Some(_sub_m)) => {
            let (range_start, range_end) = clock.this_week_range();
            Ok(RTWAction::Timeline((range_start, range_end)))
        }
        ("cancel", Some(sub_m)) => {
            let cancelled_id_maybe = cli_helper::parse_cancel_args(sub_m)?;
            Ok(RTWAction::Cancel(cancelled_id_maybe))
        }
        // `dump` reuses the summary argument parser but ignores display flags
        ("dump", Some(sub_m)) => {
            let ((range_start, range_end), _display_id, _description, _report) =
                cli_helper::parse_summary_args(sub_m, clock)?;
            Ok(RTWAction::DumpICal((range_start, range_end)))
        }
        ("completion", Some(sub_m)) => {
            let shell = cli_helper::parse_completion_args(sub_m)?;
            Ok(RTWAction::Completion(shell))
        }
        // default case: display current activity
        _ => Ok(RTWAction::DisplayCurrent),
    }
}
/// Dry run (side effect-free)
///
/// Prints the user-facing output for `action` and returns the
/// `RTWMutation` to be applied by `run_mutation`. It reads from the
/// service but must not write to it.
pub fn dry_run_action<S, Cl>(
    action: RTWAction,
    service: &Service<S>,
    clock: &Cl,
    config: &RTWConfig,
) -> anyhow::Result<RTWMutation>
where
    S: Storage,
    Cl: Clock,
{
    match action {
        RTWAction::Start(start_time, tags, description) => {
            let started = OngoingActivity::new(start_time, tags, description);
            println!("Tracking {}", started.get_title());
            println!("Started {}", started.get_start_time());
            Ok(RTWMutation::Start(started))
        }
        RTWAction::Track((start_time, stop_time), tags, description) => {
            let tracked =
                OngoingActivity::new(start_time, tags, description).into_activity(stop_time)?;
            println!("Recorded {}", tracked.get_title());
            println!("Started {:>20}", tracked.get_start_time());
            println!("Ended {:>20}", tracked.get_stop_time());
            println!("Total {:>20}", tracked.get_duration());
            Ok(RTWMutation::Track(tracked))
        }
        RTWAction::Stop(stop_time, activity_id) => {
            // resolve the (optional) id to a single ongoing activity
            match get_ongoing_activity(activity_id, &service)? {
                Optional(None) => {
                    println!("There is no active time tracking.");
                    Ok(RTWMutation::Pure)
                }
                Optional(Some((stopped_id, stopped))) => {
                    println!("Recorded {}", stopped.get_title());
                    println!("Started {:>20}", stopped.get_start_time());
                    println!("Ended {:>20}", stop_time);
                    println!("Total {:>20}", stop_time - stopped.get_start_time());
                    Ok(RTWMutation::Stop(stop_time, stopped_id))
                }
                OptionalOrAmbiguousOrNotFound::Ambiguous => {
                    println!("Multiple ongoing activities, please provide an id.");
                    Ok(RTWMutation::Pure)
                }
                OptionalOrAmbiguousOrNotFound::NotFound(stopped_id) => {
                    println!("No ongoing activity with id {}.", stopped_id);
                    Ok(RTWMutation::Pure)
                }
            }
        }
        RTWAction::Summary((range_start, range_end), display_id, display_description, report) => {
            let activities = service.get_finished_activities()?;
            // keep activities whose start time falls inside the range
            let activities: Vec<(ActivityId, Activity)> = activities
                .iter()
                .filter(|(_i, a)| {
                    range_start <= a.get_start_time() && a.get_start_time() <= range_end
                })
                .cloned()
                .collect();
            // width used to pad titles so columns line up
            let longest_title = activities
                .iter()
                .map(|(_id, a)| a.get_title().len())
                .max()
                .unwrap_or_default();
            if activities.is_empty() {
                println!("No filtered data found.");
            } else if report {
                // report mode: one row per title with summed duration
                let activities_report = merge_same_tags(activities.as_slice());
                for (_id, finished, duration, segments) in activities_report {
                    let singular_or_plural = if segments <= 1 {
                        String::from("segment")
                    } else {
                        // segments > 1
                        String::from("segments")
                    };
                    let output = format!(
                        "{:width$} {} ({} {})",
                        finished.get_title(),
                        duration,
                        segments,
                        singular_or_plural,
                        width = longest_title
                    );
                    println!("{}", output)
                }
            } else {
                // plain mode: one row per finished activity
                for (id, finished) in activities {
                    let output = format!(
                        "{:width$} {} {} {}",
                        finished.get_title(),
                        finished.get_start_time(),
                        finished.get_stop_time(),
                        finished.get_duration(),
                        width = longest_title
                    );
                    let output = if display_id {
                        format!("{:>1} {}", id, output)
                    } else {
                        output
                    };
                    let output = match (display_description, finished.get_description()) {
                        (false, _) => output,
                        (true, None) => output,
                        (true, Some(description)) => format!("{}\n{}", output, description),
                    };
                    println!("{}", output)
                }
            }
            Ok(RTWMutation::Pure)
        }
        RTWAction::Continue(activity_id) => {
            let activities = service.get_finished_activities()?;
            let activity_id = activity_id.unwrap_or(0); // id 0 == last finished activity
            let continued_maybe = activities.iter().find(|(id, _a)| *id == activity_id);
            match continued_maybe {
                None => {
                    println!("No activity to continue from.");
                    Ok(RTWMutation::Pure)
                }
                Some((_id, finished)) => {
                    // restart the finished activity now, with its tags
                    // and description
                    println!("Tracking {}", finished.get_title());
                    let new_current = OngoingActivity::new(
                        clock.get_time(),
                        finished.get_tags(),
                        finished.get_description(),
                    );
                    Ok(RTWMutation::Start(new_current))
                }
            }
        }
        RTWAction::Delete(activity_id) => {
            let deleted = service.filter_activities(|(i, _)| *i == activity_id)?;
            let deleted_maybe = deleted.first();
            match deleted_maybe {
                None => {
                    println!("No activity found for id {}.", activity_id);
                    Ok(RTWMutation::Pure)
                }
                Some((deleted_id, deleted)) => {
                    println!("Deleted {}", deleted.get_title());
                    println!("Started {:>20}", deleted.get_start_time());
                    println!("Ended {:>20}", deleted.get_stop_time());
                    println!("Total {:>20}", deleted.get_duration());
                    Ok(RTWMutation::Delete(*deleted_id))
                }
            }
        }
        RTWAction::DisplayCurrent => {
            let ongoing_activities = service.get_ongoing_activities()?;
            if ongoing_activities.is_empty() {
                println!("There is no active time tracking.");
            } else {
                for (id, ongoing_activity) in ongoing_activities {
                    println!("Tracking {}", ongoing_activity.get_title());
                    println!(
                        "Total {}",
                        clock.get_time() - ongoing_activity.get_start_time()
                    );
                    println!("Id {}", id);
                }
            }
            Ok(RTWMutation::Pure)
        }
        RTWAction::Timeline((range_start, range_end)) => {
            let activities = service.get_finished_activities()?;
            let activities: Vec<ActivityWithId> = activities
                .iter()
                .filter(|(_i, a)| {
                    range_start <= a.get_start_time() && a.get_start_time() <= range_end
                })
                .cloned()
                .collect();
            let now = clock.get_time();
            // ongoing activities are rendered as if they ended now
            let ongoing_activities = service.get_ongoing_activities()?;
            let ongoing_activities: Vec<ActivityWithId> = ongoing_activities
                .iter()
                .filter(|(_i, a)| {
                    range_start <= a.get_start_time() && a.get_start_time() <= range_end
                })
                .filter_map(|(i, a)| match a.clone().into_activity(now) {
                    Ok(a) => Some((*i, a)),
                    _ => None,
                })
                .collect();
            let timeline_activities: Vec<ActivityWithId> = activities
                .iter()
                .cloned()
                .chain(ongoing_activities.iter().cloned())
                .collect();
            let rendered = render_days(timeline_activities.as_slice(), &config.timeline_colors)?;
            for line in rendered {
                println!("{}", line);
            }
            Ok(RTWMutation::Pure)
        }
        RTWAction::Cancel(id_maybe) => match get_ongoing_activity(id_maybe, service)? {
            Optional(None) => {
                println!("Nothing to cancel: there is no active time tracking.");
                Ok(RTWMutation::Pure)
            }
            Optional(Some((cancelled_id, cancelled))) => {
                println!("Cancelled {}", cancelled.get_title());
                println!("Started {:>20}", cancelled.get_start_time());
                println!(
                    "Total {:>20}",
                    clock.get_time() - cancelled.get_start_time()
                );
                Ok(RTWMutation::Cancel(cancelled_id))
            }
            OptionalOrAmbiguousOrNotFound::Ambiguous => {
                println!("Multiple ongoing activities, please provide an id.");
                Ok(RTWMutation::Pure)
            }
            OptionalOrAmbiguousOrNotFound::NotFound(cancelled_id) => {
                println!("No ongoing activity with id {}.", cancelled_id);
                Ok(RTWMutation::Pure)
            }
        },
        RTWAction::DumpICal((range_start, range_end)) => {
            let activities = service.get_finished_activities()?;
            let activities: Vec<Activity> = activities
                .iter()
                .map(|(_i, a)| a)
                .filter(|a| range_start <= a.get_start_time() && a.get_start_time() <= range_end)
                .cloned()
                .collect();
            let calendar = export_activities_to_ical(activities.as_slice());
            println!("{}", calendar);
            Ok(RTWMutation::Pure)
        }
        RTWAction::Completion(shell) => {
            let mut app = cli_helper::get_app();
            app.gen_completions_to(crate_name!(), shell, &mut std::io::stdout());
            Ok(RTWMutation::Pure)
        }
    }
}
/// Apply a mutation through the activity service (the side-effecting step).
///
/// Service results are discarded: the user-facing output was already
/// produced during the dry run.
pub fn run_mutation<S>(
    action: RTWMutation,
    service: &mut Service<S>,
    config: &RTWConfig,
) -> anyhow::Result<()>
where
    S: Storage,
{
    let deny_overlapping = config.deny_overlapping;
    match action {
        RTWMutation::Start(activity) => {
            service.start_activity(activity, deny_overlapping)?;
        }
        RTWMutation::Track(activity) => {
            service.track_activity(activity, deny_overlapping)?;
        }
        RTWMutation::Stop(stop_time, activity_id) => {
            service.stop_ongoing_activity(stop_time, activity_id, deny_overlapping)?;
        }
        RTWMutation::Delete(activity_id) => {
            service.delete_activity(activity_id)?;
        }
        RTWMutation::Cancel(activity_id) => {
            service.cancel_ongoing_activity(activity_id)?;
        }
        RTWMutation::Pure => {
            // pure: nothing to persist
        }
    }
    Ok(())
}
<file_sep>//! Core traits and data structures.
pub mod activity;
pub mod clock;
pub mod datetimew;
pub mod durationw;
pub mod service;
pub mod storage;
/// Absolute dates are parsed and displayed using this format
///
/// e.g. 2019-12-25T18:43:00
pub const DATETIME_FMT: &str = "%Y-%m-%dT%H:%M:%S";
/// `Tag` = `String`
pub type Tag = String;
/// `Tags` = `Vec<Tag>`
pub type Tags = Vec<Tag>;
/// `ActivityId` = `usize`
///
/// By convention id 0 denotes the last finished activity (see `Storage`).
pub type ActivityId = usize;
/// `Description` = `String`
pub type Description = String;
<file_sep>#[macro_use]
extern crate clap;
use crate::chrono_clock::ChronoClock;
use crate::cli_helper::get_app;
use crate::json_storage::JsonStorage;
use crate::rtw_cli::{dry_run_action, run, run_mutation};
use crate::rtw_config::{load_config, RTWConfig};
use crate::service::Service;
use std::path::PathBuf;
use std::str::FromStr;
mod chrono_clock;
mod cli_helper;
mod ical_export;
mod json_storage;
mod rtw_cli;
mod rtw_config;
mod rtw_core;
mod service;
mod time_tools;
mod timeline;
/// CLI entry point: parse args, load config, wire storage + service,
/// then dry-run the requested action and (unless `--dry-run`) apply it.
fn main() -> anyhow::Result<()> {
    let clock = ChronoClock {};
    let app = get_app();
    let matches = app.get_matches();
    let config = load_config()?;
    // `default` flag: ignore the loaded config, use built-in defaults
    let config = if matches.is_present("default") {
        RTWConfig::default()
    } else {
        config
    };
    let config = if matches.is_present("overlap") {
        config.deny_overlapping(false)
    } else {
        config
    };
    // checked after `overlap`, so `no_overlap` wins when both are present
    let config = if matches.is_present("no_overlap") {
        config.deny_overlapping(true)
    } else {
        config
    };
    // storage directory: CLI flag takes precedence over config
    let storage_dir = match matches.value_of("directory") {
        None => config.storage_dir_path.clone(),
        Some(dir_str) => PathBuf::from_str(dir_str).expect("invalid directory"),
    };
    let current_activity_path = storage_dir.join(".rtw.json");
    let finished_activity_path = storage_dir.join(".rtwh.json");
    let mut service = Service::new(JsonStorage::new(
        current_activity_path,
        finished_activity_path,
    ));
    #[cfg(windows)]
    {
        // best-effort: ignore failure to enable ANSI colors on Windows
        ansi_term::enable_ansi_support().unwrap_or(());
    }
    let action = run(&matches, &clock)?;
    let mutation = dry_run_action(action, &service, &clock, &config)?;
    if matches.is_present("dry-run") {
        println!("(dry-run) nothing done");
        Ok(())
    } else {
        run_mutation(mutation, &mut service, &config)
    }
}
<file_sep>//! Storage: abstracts activities storage (file, memory...)
use crate::rtw_core::activity::{Activity, OngoingActivity};
use crate::rtw_core::ActivityId;
use std::error::Error;
/// Persistence backend for finished and ongoing activities
/// (e.g. JSON files — see `JsonStorage`).
pub trait Storage {
    // see anyhow::Error type constraints
    /// Error type produced by this backend.
    type StorageError: Error + Sync + Send + 'static;
    /// Write finished activity
    ///
    /// May fail depending on backend implementation
    fn write_activity(&mut self, activity: Activity) -> Result<(), Self::StorageError>;
    /// Filter finished activities
    ///
    /// May fail depending on implementation
    ///
    /// Returns finished activities sorted by start date
    ///
    /// ActivityId: 0 <=> last finished activity
    fn filter_activities<P>(&self, p: P) -> Result<Vec<(ActivityId, Activity)>, Self::StorageError>
    where
        P: Fn(&(ActivityId, Activity)) -> bool;
    /// Get all finished activities
    ///
    /// May fail depending on implementation
    ///
    /// Returns finished activities sorted by start date
    ///
    /// ActivityId: 0 <=> last finished activity
    fn get_finished_activities(&self) -> Result<Vec<(ActivityId, Activity)>, Self::StorageError>;
    /// Delete activity with id
    ///
    /// May fail depending on implementation
    ///
    /// Returns deleted activity if successful
    fn delete_activity(&self, id: ActivityId) -> Result<Option<Activity>, Self::StorageError>;
    /// Retrieve ongoing activities if any
    ///
    /// May fail depending on backend implementation
    fn get_ongoing_activities(
        &self,
    ) -> Result<Vec<(ActivityId, OngoingActivity)>, Self::StorageError>;
    /// Retrieve ongoing activity with id if any
    ///
    /// May fail depending on backend implementation
    fn get_ongoing_activity(
        &self,
        id: ActivityId,
    ) -> Result<Option<OngoingActivity>, Self::StorageError>;
    /// Add `activity` to ongoing activities
    ///
    /// May fail depending on backend implementation
    fn add_ongoing_activity(&mut self, activity: OngoingActivity)
        -> Result<(), Self::StorageError>;
    /// Remove ongoing activity
    ///
    /// May fail depending on backend implementation
    fn remove_ongoing_activity(
        &mut self,
        id: ActivityId,
    ) -> Result<Option<OngoingActivity>, Self::StorageError>;
}
<file_sep>use crate::rtw_core::activity::Activity;
use crate::rtw_core::datetimew::DateTimeW;
use chrono::{DateTime, Local};
use icalendar::Calendar;
use icalendar::CalendarDateTime;
use icalendar::Component;
use icalendar::Event;
impl From<DateTimeW> for CalendarDateTime {
    /// Convert a `DateTimeW` into an iCalendar date-time.
    ///
    /// NOTE(review): the local time is converted to its naive UTC
    /// equivalent before the conversion — presumably so exported events
    /// are expressed in UTC; confirm this matches how calendar consumers
    /// interpret the resulting value.
    fn from(d: DateTimeW) -> Self {
        let local: DateTime<Local> = d.into();
        local.naive_utc().into()
    }
}
impl From<Activity> for Event {
    /// Build an iCalendar event from a finished activity: the title
    /// becomes the summary, the description (when present) the event
    /// description, and start/stop times the event bounds.
    fn from(a: Activity) -> Self {
        let title = a.get_title();
        let mut event = Event::new();
        event.summary(title.as_str());
        // only attach a description property when the activity has one
        if let Some(description) = a.get_description() {
            event.description(&description);
        }
        event.starts(a.get_start_time());
        event.ends(a.get_stop_time());
        event.done()
    }
}
pub(crate) fn export_activities_to_ical(activities: &[Activity]) -> Calendar {
let mut calendar = Calendar::new();
for activity in activities {
let event: Event = activity.clone().into();
calendar.push(event);
}
calendar
}
<file_sep>[package]
name = "rtw"
version = "2.2.0"
authors = ["PicoJr <<EMAIL>>"]
edition = "2018"
repository = "https://github.com/PicoJr/rtw"
description = "time tracker command line tool"
license = "MIT OR Apache-2.0"
readme = "README.md"
keywords = ["time", "tracker", "cli", "tool"]
categories = ["command-line-utilities"]
include = ["src/**/*", "/LICENSE", "/README.md", "/CHANGELOG.md", "/commands.md", "/shell-completion.md", "/img/*"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = "2.33.0"
anyhow = "1.0"
thiserror = "1.0.19"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
dirs-next = "2.0.0"
chrono = { version = "0.4", features = ["serde"] }
htp = "0.2.1"
config = "0.10.1"
ansi_term = "0.12.1"
term_size = "0.3.2"
tbl = "1.1.0-alpha.1"
icalendar = "0.9.0"
itertools = "0.9"
[dev-dependencies]
tempfile = "3"
assert_cmd = "0.12"
predicates = "1.0.4"
<file_sep>//! Logic above an activity storage
use crate::rtw_core::activity::{intersect, overlap, Activity, OngoingActivity};
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::service::ActivityService;
use crate::rtw_core::storage::Storage;
use crate::rtw_core::ActivityId;
use anyhow::anyhow;
/// Activity service on top of a generic `Storage` backend.
///
/// Adds the overlap-checking logic above raw storage operations.
pub struct Service<S>
where
    S: Storage,
{
    // underlying persistence backend
    storage: S,
}
impl<S> Service<S>
where
    S: Storage,
{
    /// Build a service on top of the given storage backend.
    pub fn new(storage: S) -> Self {
        Self { storage }
    }
}
impl<S> ActivityService for Service<S>
where
    S: Storage,
{
    /// Delegate to storage: all ongoing activities with their ids.
    fn get_ongoing_activities(&self) -> anyhow::Result<Vec<(ActivityId, OngoingActivity)>> {
        self.storage.get_ongoing_activities().map_err(|e| e.into())
    }
    /// Delegate to storage: ongoing activity with `id`, if any.
    fn get_ongoing_activity(&self, id: ActivityId) -> anyhow::Result<Option<OngoingActivity>> {
        self.storage.get_ongoing_activity(id).map_err(|e| e.into())
    }
    /// Start a new ongoing activity.
    ///
    /// When `deny_overlapping` is true:
    /// * errors if the start time falls within a finished activity;
    /// * if exactly one activity is ongoing, it is stopped at the new
    ///   activity's start time (returned as the second tuple member);
    /// * errors if several activities are already ongoing.
    /// When false, the activity is added unconditionally.
    fn start_activity(
        &mut self,
        activity: OngoingActivity,
        deny_overlapping: bool,
    ) -> anyhow::Result<(OngoingActivity, Option<Activity>)> {
        let finished = self.storage.get_finished_activities()?;
        if deny_overlapping {
            let intersections = time_intersections(finished.as_slice(), &activity.start_time);
            if intersections.is_empty() {
                let ongoing_activities = self.storage.get_ongoing_activities()?;
                match ongoing_activities.as_slice() {
                    [] => {
                        self.storage.add_ongoing_activity(activity.clone())?;
                        Ok((activity, None))
                    }
                    [(ongoing_id, _ongoing)] => {
                        // stop the single ongoing activity where the new
                        // one starts, then start the new one
                        let stopped_maybe =
                            self.stop_ongoing_activity(activity.start_time, *ongoing_id, true)?;
                        self.storage.add_ongoing_activity(activity.clone())?;
                        Ok((activity, stopped_maybe))
                    }
                    _ => Err(anyhow!(
                        "multiple ongoing activities but overlapping is disabled\n\
                        Tip: you can enable overlapping using `rtw --overlap (start|stop|track|...)`"
                    )),
                }
            } else {
                Err(anyhow!(
                    "{:?} would overlap {:?}\n\
                    Tip: you can enable overlapping using `rtw --overlap (start|stop|track|...)`",
                    activity,
                    intersections
                ))
            }
        } else {
            self.storage.add_ongoing_activity(activity.clone())?;
            Ok((activity, None))
        }
    }
    /// Stop the ongoing activity with `id` at `time`.
    ///
    /// Returns `Ok(None)` when no ongoing activity carries `id`. When
    /// `deny_overlapping` is true, errors if the resulting finished
    /// activity would overlap an already-finished one.
    fn stop_ongoing_activity(
        &mut self,
        time: DateTimeW,
        id: ActivityId,
        deny_overlapping: bool,
    ) -> anyhow::Result<Option<Activity>> {
        let stopped_maybe = self.storage.get_ongoing_activity(id)?;
        match stopped_maybe {
            None => Ok(None),
            Some(ongoing_activity) => {
                let stopped = ongoing_activity.clone().into_activity(time)?;
                let finished = self.storage.get_finished_activities()?;
                let intersections = activity_intersections(finished.as_slice(), &stopped);
                if !deny_overlapping || intersections.is_empty() {
                    // persist the finished activity before removing the
                    // ongoing entry
                    self.storage.write_activity(stopped)?;
                    self.storage.remove_ongoing_activity(id)?;
                    Ok(Some(ongoing_activity.into_activity(time)?))
                } else {
                    Err(anyhow!(
                        "{:?} would overlap {:?}\n\
                        Tip: you can enable overlapping using `rtw --overlap (start|stop|track|...)`",
                        stopped,
                        intersections
                    ))
                }
            }
        }
    }
    /// Drop the ongoing activity with `id` without recording it.
    fn cancel_ongoing_activity(
        &mut self,
        id: ActivityId,
    ) -> anyhow::Result<Option<OngoingActivity>> {
        self.storage
            .remove_ongoing_activity(id)
            .map_err(|e| e.into())
    }
    /// Delegate to storage: finished activities matching predicate `p`.
    fn filter_activities<P>(&self, p: P) -> anyhow::Result<Vec<(ActivityId, Activity)>>
    where
        P: Fn(&(ActivityId, Activity)) -> bool,
    {
        self.storage.filter_activities(p).map_err(|e| e.into())
    }
    /// Delegate to storage: all finished activities with their ids.
    fn get_finished_activities(&self) -> anyhow::Result<Vec<(ActivityId, Activity)>> {
        self.storage.get_finished_activities().map_err(|e| e.into())
    }
    /// Delegate to storage: delete the finished activity with `id`.
    fn delete_activity(&self, id: ActivityId) -> anyhow::Result<Option<Activity>> {
        self.storage.delete_activity(id).map_err(|e| e.into())
    }
    /// Record an already-finished activity.
    ///
    /// When `deny_overlapping` is true, errors if it would overlap an
    /// already-finished activity.
    fn track_activity(
        &mut self,
        activity: Activity,
        deny_overlapping: bool,
    ) -> anyhow::Result<Activity> {
        let finished = self.storage.get_finished_activities()?;
        let intersections = activity_intersections(finished.as_slice(), &activity);
        if !deny_overlapping || intersections.is_empty() {
            self.storage.write_activity(activity.clone())?;
            Ok(activity)
        } else {
            Err(anyhow!(
                "{:?} would overlap {:?}\n\
                Tip: you can enable overlapping using `rtw --overlap (start|stop|track|...)`",
                activity,
                intersections
            ))
        }
    }
}
/// Collect the overlaps between `activity` and each finished activity.
fn activity_intersections(
    activities: &[(ActivityId, Activity)],
    activity: &Activity,
) -> Vec<Activity> {
    let mut overlapping = Vec::new();
    for (_, finished) in activities {
        if let Some(common) = overlap(finished, activity) {
            overlapping.push(common);
        }
    }
    overlapping
}
/// Collect the finished activities whose span contains `start_time`.
fn time_intersections(
    activities: &[(ActivityId, Activity)],
    start_time: &DateTimeW,
) -> Vec<Activity> {
    let mut containing = Vec::new();
    for (_, finished) in activities {
        if let Some(hit) = intersect(finished, start_time) {
            containing.push(hit);
        }
    }
    containing
}
#[cfg(test)]
mod tests {
use crate::chrono_clock::ChronoClock;
use crate::json_storage::JsonStorage;
use crate::rtw_core::activity::OngoingActivity;
use crate::rtw_core::clock::Clock;
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::service::ActivityService;
use crate::service::Service;
use chrono::{Local, TimeZone};
use tempfile::{tempdir, TempDir};
fn build_json_service(test_dir: &TempDir) -> Service<JsonStorage> {
let finished_path = test_dir.path().join(".rtwh.json");
let current_path = test_dir.path().join(".rtwc.json");
Service::new(JsonStorage::new(current_path, finished_path))
}
#[test]
fn test_no_activity() {
let clock = ChronoClock {};
let test_dir = tempdir().expect("error while creating tempdir");
let mut service = build_json_service(&test_dir);
assert!(service
.stop_ongoing_activity(clock.get_time(), 0, true)
.is_ok());
assert!(service.get_ongoing_activities().unwrap().is_empty());
}
#[test]
fn test_start_activity() {
let clock = ChronoClock {};
let test_dir = tempdir().expect("error while creating tempdir");
let mut service = build_json_service(&test_dir);
assert!(service
.stop_ongoing_activity(clock.get_time(), 0, true)
.is_ok());
let start = service.start_activity(
OngoingActivity {
start_time: clock.get_time(),
tags: vec![String::from("a")],
description: None,
},
true,
);
start.unwrap();
let current = service.get_ongoing_activities();
assert!(current.is_ok());
assert!(!current.unwrap().is_empty());
}
#[test]
fn test_stop_activity_with_active() {
let clock = ChronoClock {};
let test_dir = tempdir().expect("error while creating tempdir");
let mut service = build_json_service(&test_dir);
let start = service.start_activity(
OngoingActivity {
start_time: clock.get_time(),
tags: vec![String::from("a")],
description: None,
},
true,
);
start.unwrap();
assert!(!service.get_ongoing_activities().unwrap().is_empty());
assert!(service
.stop_ongoing_activity(clock.get_time(), 0, true)
.is_ok());
assert!(service.get_ongoing_activities().unwrap().is_empty());
}
#[test]
fn test_start_stop_start() {
let clock = ChronoClock {};
let test_dir = tempdir().expect("error while creating tempdir");
let mut service = build_json_service(&test_dir);
let start_0 = service.start_activity(
OngoingActivity {
start_time: clock.get_time(),
tags: vec![String::from("a")],
description: None,
},
true,
);
assert!(start_0.is_ok());
assert!(!service.get_ongoing_activities().unwrap().is_empty());
let stop = service.stop_ongoing_activity(clock.get_time(), 0, true);
assert!(stop.is_ok());
assert!(service.get_ongoing_activities().unwrap().is_empty());
let start_1 = service.start_activity(
OngoingActivity {
start_time: clock.get_time(),
tags: vec![String::from("b")],
description: None,
},
true,
);
assert!(start_1.is_ok());
assert!(!service.get_ongoing_activities().unwrap().is_empty());
}
#[test]
// With deny_overlapping = true, starting an activity whose start time
// falls inside an already-tracked activity must fail.
fn test_start_intersecting_activity() {
    let test_dir = tempdir().expect("error while creating tempdir");
    let mut service = build_json_service(&test_dir);
    // Finished activity covering 09:00 - 10:00.
    let finished = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T09:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    )
    .into_activity(
        Local
            .datetime_from_str("2020-12-25T10:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
    )
    .unwrap();
    let tracked = service.track_activity(finished, true);
    assert!(tracked.is_ok());
    // 09:30 lies strictly inside the tracked 09:00 - 10:00 interval.
    let other = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    );
    let started = service.start_activity(other, true);
    assert!(started.is_err());
}
#[test]
// With deny_overlapping = true, an ongoing activity may start before a
// tracked one, but stopping it so that it would overlap the tracked
// activity must fail.
fn test_stop_intersecting_activity() {
    let test_dir = tempdir().expect("error while creating tempdir");
    let mut service = build_json_service(&test_dir);
    // Finished activity covering 09:00 - 10:00.
    let finished = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T09:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    )
    .into_activity(
        Local
            .datetime_from_str("2020-12-25T10:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
    )
    .unwrap();
    let tracked = service.track_activity(finished, true);
    assert!(tracked.is_ok());
    // Starting at 08:30 (before the tracked activity) is fine.
    let other = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T08:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    );
    let started = service.start_activity(other, true);
    assert!(started.is_ok());
    // Stopping at 09:30 would produce 08:30 - 09:30, which overlaps
    // the tracked 09:00 - 10:00 activity => must be rejected.
    let stopped = service.stop_ongoing_activity(
        Local
            .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        0, // only one ongoing activity => id is 0
        true,
    );
    assert!(stopped.is_err());
}
#[test]
// Filtering an empty store must succeed (and trivially return no rows).
fn test_summary_nothing() {
    let clock = ChronoClock {};
    let test_dir = tempdir().expect("error while creating tempdir");
    let service = build_json_service(&test_dir);
    let (today_start, today_end) = clock.today_range();
    // Keep activities whose start time falls within today's range.
    let activities = service.filter_activities(|(_id, a)| {
        today_start <= a.get_start_time() && a.get_start_time() <= today_end
    });
    assert!(activities.is_ok());
}
#[test]
// An activity tracked inside the queried range must show up in the filter.
fn test_summary_something() {
    let test_dir = tempdir().expect("error while creating tempdir");
    let mut service = build_json_service(&test_dir);
    let today = chrono::Local::today();
    // Query range 08:00 - 09:00; activity 08:30 - 08:45 lies inside it.
    let range_start: DateTimeW = today.and_hms(8, 0, 0).into();
    let activity_start: DateTimeW = today.and_hms(8, 30, 0).into();
    let activity_end: DateTimeW = today.and_hms(8, 45, 0).into();
    let range_end: DateTimeW = today.and_hms(9, 0, 0).into();
    service
        .track_activity(
            OngoingActivity::new(activity_start, vec![], None)
                .into_activity(activity_end)
                .unwrap(),
            true,
        )
        .unwrap();
    let activities = service.filter_activities(|(_id, a)| {
        range_start <= a.get_start_time() && a.get_start_time() <= range_end
    });
    assert!(!activities.unwrap().is_empty());
}
#[test]
// An activity tracked outside the queried range must NOT show up.
fn test_summary_not_in_range() {
    let test_dir = tempdir().expect("error while creating tempdir");
    let mut service = build_json_service(&test_dir);
    let today = chrono::Local::today();
    // Query range 09:00 - 10:00; activity 08:30 - 08:45 starts before it.
    let range_start: DateTimeW = today.and_hms(9, 0, 0).into();
    let activity_start: DateTimeW = today.and_hms(8, 30, 0).into();
    let activity_end: DateTimeW = today.and_hms(8, 45, 0).into();
    let range_end: DateTimeW = today.and_hms(10, 0, 0).into();
    service
        .track_activity(
            OngoingActivity::new(activity_start, vec![], None)
                .into_activity(activity_end)
                .unwrap(),
            true,
        )
        .unwrap();
    let activities = service.filter_activities(|(_id, a)| {
        range_start <= a.get_start_time() && a.get_start_time() <= range_end
    });
    assert!(activities.unwrap().is_empty());
}
#[test]
// With deny_overlapping = true, tracking a finished activity that
// overlaps an already-tracked one must fail.
fn test_track_intersecting_activity() {
    let test_dir = tempdir().expect("error while creating tempdir");
    let mut service = build_json_service(&test_dir);
    // Finished activity covering 09:00 - 10:00.
    let finished = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T09:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    )
    .into_activity(
        Local
            .datetime_from_str("2020-12-25T10:00:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
    )
    .unwrap();
    let tracked = service.track_activity(finished, true);
    assert!(tracked.is_ok());
    // 09:30 - 10:30 overlaps the tracked 09:00 - 10:00 interval.
    let other = OngoingActivity::new(
        Local
            .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
        vec![],
        None,
    )
    .into_activity(
        Local
            .datetime_from_str("2020-12-25T10:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into(),
    )
    .unwrap();
    let tracked = service.track_activity(other, true);
    assert!(tracked.is_err());
}
}
<file_sep># RTW Commands
<!--ts-->
* [RTW Commands](#rtw-commands)
* [Start New Activity](#start-new-activity)
* [Start tracking an activity now](#start-tracking-an-activity-now)
* [Start tracking an activity 4 minutes ago](#start-tracking-an-activity-4-minutes-ago)
* [Start tracking an activity at a specific time](#start-tracking-an-activity-at-a-specific-time)
* [Stop Current Activity](#stop-current-activity)
* [Stop current activity now](#stop-current-activity-now)
* [Stop current activity 4 minutes ago](#stop-current-activity-4-minutes-ago)
* [Stop current activity at a specific time](#stop-current-activity-at-a-specific-time)
* [Cancel current activity](#cancel-current-activity)
* [Display Summary](#display-summary)
* [Display finished activities summary for today](#display-finished-activities-summary-for-today)
* [Display finished activities summary for yesterday](#display-finished-activities-summary-for-yesterday)
* [Display finished activities summary for last week](#display-finished-activities-summary-for-last-week)
* [Display finished activities summary for range](#display-finished-activities-summary-for-range)
* [Display finished activities id](#display-finished-activities-id)
* [Display a report (sum same activities)](#display-a-report-sum-same-activities)
* [Display a timeline](#display-a-timeline)
* [For the day](#for-the-day)
* [For the week](#for-the-week)
* [For a time range](#for-a-time-range)
* [Export Finished Activities to iCalendar](#export-finished-activities-to-icalendar)
* [For today](#for-today)
* [For last week](#for-last-week)
* [For a given date range](#for-a-given-date-range)
* [Continue Activity](#continue-activity)
* [Continue last finished activity](#continue-last-finished-activity)
* [Continue finished activity with id](#continue-finished-activity-with-id)
* [Delete Activity](#delete-activity)
* [Delete Activity with id](#delete-activity-with-id)
* [Track a finished activity](#track-a-finished-activity)
* [Track a finished activity with dates](#track-a-finished-activity-with-dates)
* [Track a finished activity the same day](#track-a-finished-activity-the-same-day)
* [Track an activity and provide a long description](#track-an-activity-and-provide-a-long-description)
* [For multitasking people](#for-multitasking-people)
* [Start (overlapping) activities](#start-overlapping-activities)
* [Stop ongoing activity](#stop-ongoing-activity)
<!--te-->
Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc)
## Start New Activity
### Start tracking an activity now
Example:
```
rtw start write doc
```
Example output:
```
Tracking write doc
Started 2019-12-25T19:43:00
```
### Start tracking an activity 4 minutes ago
Example:
```
rtw start 4 min ago write doc
```
Example output:
```
Tracking write doc
Started 2019-12-25T19:39:00
```
### Start tracking an activity at a specific time
Example:
```
rtw start 2019-12-24T19:43:00 write doc
```
Example output:
```
Tracking write doc
Started 2019-12-24T19:43:00
```
## Stop Current Activity
### Stop current activity now
Example:
```
rtw stop
```
Example output:
```
Recorded write doc
Started 2019-12-25T19:43:00
Ended 2019-12-25T19:50:00
Total 00:07:00
```
### Stop current activity 4 minutes ago
Example:
```
rtw stop 4m ago
```
Example output:
```
Recorded write doc
Started 2019-12-25T19:43:00
Ended 2019-12-25T19:46:00
Total 00:03:00
```
### Stop current activity at a specific time
Example:
```
rtw stop 2019-12-25T19:45:00
```
Example output:
```
Recorded write doc
Started 2019-12-25T19:43:00
Ended 2019-12-25T19:45:00
Total 00:02:00
```
## Cancel current activity
Example:
```
rtw cancel
```
Example output:
```
Cancelled write doc
Started 2019-12-24T19:43:00
Total 00:20:05
```
## Display Summary
### Display finished activities summary for today
Example:
```
rtw summary
```
Example output:
```
write doc 2019-12-25T19:43:00 2019-12-25T19:45:00 00:02:00
```
### Display finished activities summary for yesterday
Example:
```
rtw summary --yesterday
```
Example output:
```
write doc 2019-12-24T19:43:00 2019-12-24T19:45:00 00:02:00
```
### Display finished activities summary for last week
Example:
```
rtw summary --lastweek
```
Example output:
```
write doc 2019-12-17T19:43:00 2019-12-17T19:45:00 00:02:00
```
### Display finished activities summary for range
Example:
```
rtw summary 19:00 - 20:00
```
Example output:
```
write doc 2019-12-17T19:43:00 2019-12-17T19:45:00 00:02:00
```
### Display finished activities id
Example:
```
rtw summary --id
```
Example output:
```
2 foo 2019-12-25T17:43:00 2019-12-25T17:44:00 00:01:00
1 another foo 2019-12-25T18:43:00 2019-12-25T18:44:00 00:01:00
0 bar 2019-12-25T19:43:00 2019-12-25T19:44:00 00:01:00
```
> id 0 = last finished activity
### Display a report (sum same activities)
Example:
```
rtw track 8 - 9 foo
rtw track 9 - 10 foo
rtw track 10 - 11 bar
rtw summary --report
```
Example output:
```
foo 02:00:00 (2 segments)
bar 01:00:00 (1 segments)
```
## Display a timeline
### For the day
```bash
rtw day
```
Example output (YMMV):

### For the week
```bash
rtw week
```
### For a time range
```bash
rtw timeline last monday - now
```
## Export Finished Activities to iCalendar
### For today
Example:
```
rtw dump
```
Example output:
```
BEGIN:VCALENDAR
VERSION:2.0
PRODID:ICALENDAR-RS
CALSCALE:GREGORIAN
BEGIN:VEVENT
DTSTAMP:20200616T184116Z
DTEND:20200616T203000
DTSTART:20200616T160000
SUMMARY:build a spaceship
UID:3bc8b3b6-d17b-4e1d-8323-2f55bfb14792
END:VEVENT
END:VCALENDAR
```
Dump to ics file: `rtw dump > today.ics`
### For last week
Example:
```
rtw dump lastweek
```
Dump to ics file: `rtw dump --lastweek > lastweek.ics`
### For a given date range
Example:
```
rtw dump last monday - now
```
Dump to ics file: `rtw dump last monday - now > lastweek.ics`
## Continue Activity
### Continue last finished activity
Example:
```
rtw continue
```
Example output:
```
Tracking write doc
```
### Continue finished activity with id
Example:
```
rtw continue 2
```
Example output:
```
Tracking read twir
```
## Delete Activity
### Delete Activity with id
Example:
```
rtw delete 1
```
Example output:
```
Deleted write doc
Started 2019-12-25T19:43:00
Ended 2019-12-25T19:45:00
Total 00:02:00
```
## Track a finished activity
### Track a finished activity with dates
Example:
```
rtw track 2019-12-25T19:43:00 - 2019-12-25T19:45:00 write doc
```
> please note the `-` separator
Example output
```
Recorded write doc
Started 2019-12-25T19:43:00
Ended 2019-12-25T19:45:00
Total 00:02:00
```
### Track a finished activity the same day
Example:
```
rtw track 09:00 - 10:00 write doc
```
> please note the `-` separator
Example output
```
Recorded write doc
Started 2020-03-14T09:00:00
Ended 2020-03-14T10:00:00
Total 01:00:00
```
## Track an activity and provide a long description
Example:
```
rtw track 9 - 10 breakfast -d "I ate delicious pancakes"
rtw summary -d
```
output:
```
breakfast 2020-07-11T09:00:00 2020-07-11T10:00:00 01:00:00
I ate delicious pancakes
```
## For multitasking people
Requires `deny_overlapping: false` in `rtw_config.json`
### Start (overlapping) activities
Example:
```
rtw start work
rtw start child question -d "answer how fish can breath under water"
rtw
```
Output:
```
Tracking work
Total 00:03:03
Id 0
Tracking child question
Total 00:01:25
Id 1
```
### Stop ongoing activity
`--id` is only required when ongoing activities > 1.
Example:
```
rtw stop --id 1
```
Output:
```
Recorded child question
Started 2020-07-14T10:54:36
Ended 2020-07-14T10:57:23
Total 00:02:47
```
stop the other remaining ongoing activity:
```
rtw stop
```
Output:
```
Recorded work
Started 2020-07-14T10:52:58
Ended 2020-07-14T11:00:17
Total 00:07:18
```
<file_sep>//! Activity and OngoingActivity
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::durationw::DurationW;
use crate::rtw_core::{Description, Tags};
use anyhow::anyhow;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
/// A finished activity (with a stop time)
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Activity {
    /// Activity start time
    start_time: DateTimeW,
    /// Activity `stop time` >= `start time`
    stop_time: DateTimeW,
    /// Activity tags
    tags: Tags,
    // Optional long free-form description; `serde(default)` keeps
    // deserialization backward-compatible with records written before
    // this field existed.
    #[serde(default)]
    description: Option<Description>,
}
impl Activity {
    /// When the activity began.
    pub fn get_start_time(&self) -> DateTimeW {
        self.start_time
    }
    /// When the activity ended (never earlier than the start time).
    pub fn get_stop_time(&self) -> DateTimeW {
        self.stop_time
    }
    /// Elapsed time between start and stop.
    pub fn get_duration(&self) -> DurationW {
        self.get_stop_time() - self.get_start_time()
    }
    /// Human-readable title: all tags joined with single spaces.
    pub fn get_title(&self) -> String {
        self.get_tags().join(" ")
    }
    /// A copy of the activity tags.
    pub fn get_tags(&self) -> Tags {
        self.tags.to_vec()
    }
    /// A copy of the optional long description.
    pub fn get_description(&self) -> Option<Description> {
        self.description.as_ref().cloned()
    }
}
/// Total order on finished activities: earlier start time sorts first.
impl Ord for Activity {
    fn cmp(&self, other: &Self) -> Ordering {
        self.start_time.cmp(&other.start_time)
    }
}

impl PartialOrd for Activity {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegate to the total order above.
        Some(self.cmp(other))
    }
}
/// A started and unfinished activity (no stop time)
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct OngoingActivity {
    /// start time
    pub start_time: DateTimeW,
    /// Activity tags
    pub tags: Tags,
    // Optional long free-form description; `serde(default)` keeps
    // deserialization backward-compatible with records written before
    // this field existed.
    #[serde(default)]
    pub description: Option<Description>,
}
/// Total order on ongoing activities: earlier start time sorts first.
impl Ord for OngoingActivity {
    fn cmp(&self, other: &Self) -> Ordering {
        self.start_time.cmp(&other.start_time)
    }
}

impl PartialOrd for OngoingActivity {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegate to the total order above.
        Some(self.cmp(other))
    }
}
impl OngoingActivity {
    /// Build a new ongoing (not yet finished) activity.
    pub fn new(start_time: DateTimeW, tags: Tags, description: Option<Description>) -> Self {
        OngoingActivity {
            start_time,
            tags,
            description,
        }
    }
    /// When the activity began.
    pub fn get_start_time(&self) -> DateTimeW {
        self.start_time
    }
    /// Human-readable title: all tags joined with single spaces.
    pub fn get_title(&self) -> String {
        self.tags.as_slice().join(" ")
    }
    /// Finish this activity at `stop_time`.
    ///
    /// Errors when `stop_time` is strictly before the start time.
    pub fn into_activity(self, stop_time: DateTimeW) -> anyhow::Result<Activity> {
        // Guard clause: reject inverted time ranges up front.
        if stop_time < self.start_time {
            return Err(anyhow!(
                "stop time ({}) < start_time ({})",
                stop_time,
                self.start_time
            ));
        }
        Ok(Activity {
            start_time: self.start_time,
            stop_time,
            tags: self.tags,
            description: self.description,
        })
    }
}
/// Check intersection between a finished activity and a date
///
/// Returns Some(activity) if it intersects else None.
///
/// The comparison is strict on both ends: a date equal to the start or
/// stop time does not count as intersecting.
pub fn intersect(finished: &Activity, datetimew: &DateTimeW) -> Option<Activity> {
    let after_start = &finished.start_time < datetimew;
    let before_stop = datetimew < &finished.stop_time;
    if after_start && before_stop {
        Some(finished.clone())
    } else {
        None
    }
}
/// Check overlap between 2 finished activities
///
/// Returns Some(first) if first activity overlaps with the second else None.
///
/// Implementation: whichever activity starts later must begin strictly
/// inside the other for the pair to overlap; activities that merely touch
/// at a boundary are not considered overlapping (strict `intersect`).
///
/// NOTE(review): when both activities share the exact same start time,
/// `Ord` makes `finished < other` false and the strict comparison in
/// `intersect` returns `None`, so identical-start activities are reported
/// as non-overlapping — confirm this is intended.
pub fn overlap(finished: &Activity, other: &Activity) -> Option<Activity> {
    if finished < other {
        intersect(finished, &other.start_time)
    } else {
        // `other` starts first: report `finished` as the overlapping one.
        intersect(other, &finished.start_time).map(|_| finished.clone())
    }
}
#[cfg(test)]
mod tests {
    use crate::rtw_core::activity::{intersect, overlap, Activity};
    use chrono::{Local, TimeZone};
    #[test]
    // `intersect`: a date inside the activity matches, one after it does not.
    fn test_intersect() {
        // Activity covering 09:00 - 10:00.
        let finished = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T09:00:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T10:00:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        // 09:30 lies strictly inside 09:00 - 10:00.
        let date = Local
            .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into();
        assert!(intersect(&finished, &date).is_some());
        // 10:30 lies after the activity.
        let date = Local
            .datetime_from_str("2020-12-25T10:30:00", "%Y-%m-%dT%H:%M:%S")
            .unwrap()
            .into();
        assert!(intersect(&finished, &date).is_none());
    }
    #[test]
    // `overlap` against a fixed 09:00 - 10:00 activity, covering all
    // relative positions of the second activity.
    fn test_overlap() {
        let finished = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T09:00:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T10:00:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        // Case: other starts inside (09:30 - 11:00) => overlap.
        let other = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T11:00:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        assert!(overlap(&finished, &other).is_some());
        // Case: other ends inside (08:30 - 09:30) => overlap.
        let other = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T08:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        assert!(overlap(&finished, &other).is_some());
        // Case: other fully covers finished (08:30 - 10:30) => overlap.
        let other = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T08:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T10:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        assert!(overlap(&finished, &other).is_some());
        // Case: other fully contained (09:30 - 09:45) => overlap.
        let other = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T09:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T09:45:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        assert!(overlap(&finished, &other).is_some());
        // Case: other entirely after (10:30 - 11:45) => no overlap.
        let other = Activity {
            start_time: Local
                .datetime_from_str("2020-12-25T10:30:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            stop_time: Local
                .datetime_from_str("2020-12-25T11:45:00", "%Y-%m-%dT%H:%M:%S")
                .unwrap()
                .into(),
            tags: vec![],
            description: None,
        };
        assert!(overlap(&finished, &other).is_none());
    }
}
<file_sep>//! A service for activities: abstracts activities queries and modifications.
use crate::rtw_core::activity::{Activity, OngoingActivity};
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::ActivityId;
/// A service for activities
///
/// Abstracts activities queries and modifications
pub trait ActivityService {
    /// Get ongoing activities if any
    ///
    /// May fail depending on backend implementation
    fn get_ongoing_activities(&self) -> anyhow::Result<Vec<(ActivityId, OngoingActivity)>>;
    /// Get ongoing activity with id if any
    ///
    /// May fail depending on backend implementation
    fn get_ongoing_activity(&self, id: ActivityId) -> anyhow::Result<Option<OngoingActivity>>;
    /// Start a new activity
    ///
    /// May fail depending on backend implementation
    ///
    /// When `deny_overlapping` is true the implementation is expected to
    /// reject activities conflicting with existing ones.
    ///
    /// Returns new current activity and optionally the previously ongoing activity
    fn start_activity(
        &mut self,
        activity: OngoingActivity,
        deny_overlapping: bool,
    ) -> anyhow::Result<(OngoingActivity, Option<Activity>)>;
    /// Stop current activity
    ///
    /// May fail depending on backend implementation
    ///
    /// Returns stopped activity if any
    fn stop_ongoing_activity(
        &mut self,
        time: DateTimeW,
        id: ActivityId,
        deny_overlapping: bool,
    ) -> anyhow::Result<Option<Activity>>;
    /// Cancel current activity
    ///
    /// May fail depending on backend implementation
    ///
    /// Returns cancelled activity if any
    fn cancel_ongoing_activity(
        &mut self,
        id: ActivityId,
    ) -> anyhow::Result<Option<OngoingActivity>>;
    /// Filter finished activities
    ///
    /// May fail depending on implementation
    ///
    /// Returns finished activities sorted by start date
    ///
    /// ActivityId: 0 <=> last finished activity
    fn filter_activities<P>(&self, p: P) -> anyhow::Result<Vec<(ActivityId, Activity)>>
    where
        P: Fn(&(ActivityId, Activity)) -> bool;
    /// Get all finished activities
    ///
    /// May fail depending on implementation
    ///
    /// Returns finished activities sorted by start date
    ///
    /// ActivityId: 0 <=> last finished activity
    fn get_finished_activities(&self) -> anyhow::Result<Vec<(ActivityId, Activity)>>;
    /// Delete activity with id
    ///
    /// May fail depending on implementation
    ///
    /// NOTE(review): this takes `&self` while the other mutating methods
    /// take `&mut self` — confirm whether deletion is meant to be callable
    /// through a shared reference.
    ///
    /// Returns deleted activity if successful
    fn delete_activity(&self, id: ActivityId) -> anyhow::Result<Option<Activity>>;
    /// Track a finished activity
    ///
    /// May fail depending on backend implementation
    ///
    /// Returns tracked activity if successful
    fn track_activity(
        &mut self,
        activity: Activity,
        deny_overlapping: bool,
    ) -> anyhow::Result<Activity>;
}
<file_sep>//! Newtype on `chrono::Date<Local>`
use crate::rtw_core::durationw::DurationW;
use crate::rtw_core::DATETIME_FMT;
use chrono::{DateTime, Local};
use std::fmt::{Error, Formatter};
use serde::{Deserialize, Serialize};
/// Newtype on `chrono::Date<Local>`
///
/// Date is given in local time for convenience
///
/// The derived `PartialOrd`/`Ord` delegate to the wrapped `DateTime`,
/// so `DateTimeW` values compare chronologically.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct DateTimeW(DateTime<Local>);
impl From<DateTime<Local>> for DateTimeW {
    /// Wrap a local `chrono` datetime.
    fn from(dt: DateTime<Local>) -> Self {
        DateTimeW(dt)
    }
}

// Idiom fix: implement `From` instead of `Into` (clippy `from_over_into`).
// The standard-library blanket impl automatically provides
// `Into<DateTime<Local>> for DateTimeW`, so existing `.into()` callers
// keep working unchanged.
impl From<DateTimeW> for DateTime<Local> {
    /// Unwrap back to the underlying `chrono` datetime.
    fn from(dt: DateTimeW) -> Self {
        dt.0
    }
}
/// Subtracting two instants yields a wrapped duration.
impl std::ops::Sub for DateTimeW {
    type Output = DurationW;
    fn sub(self, other: Self) -> Self::Output {
        DurationW::new(self.0 - other.0)
    }
}

/// Render using the application-wide `DATETIME_FMT` pattern.
impl std::fmt::Display for DateTimeW {
    fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), Error> {
        let rendered = self.0.format(DATETIME_FMT);
        write!(formatter, "{}", rendered)
    }
}
<file_sep>//! Config.
extern crate config;
use self::config::FileFormat;
use serde::Deserialize;
use serde::Serialize;
use std::path::PathBuf;
// Built-in defaults, merged before any user config file so user files
// only need to override the keys they care about.
const DEFAULT_CONFIG: &str = r#"
{
"timeline_colors": [[183,28,28], [26,35,126], [0,77,64], [38,50,56]],
"deny_overlapping": true
}
"#;
// An RGB color triple.
type RGB = (u8, u8, u8);
/// User-facing configuration, deserialized from `rtw_config.json`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct RTWConfig {
    // Directory where activities are stored.
    pub storage_dir_path: PathBuf,
    // Colors used when rendering timelines.
    pub timeline_colors: Vec<RGB>,
    // When true, overlapping activities are rejected.
    pub deny_overlapping: bool,
}
// Idiom fix (clippy `should_implement_trait`): an inherent `default()`
// method shadows the standard `Default` trait; implementing the trait
// instead keeps `RTWConfig::default()` working for all existing callers
// (the trait is in the prelude) while integrating with generic code.
impl Default for RTWConfig {
    fn default() -> Self {
        // Panics when no home directory can be determined: there is no
        // sensible fallback location for activity storage.
        let home_dir = dirs_next::home_dir().expect("could not find home dir");
        RTWConfig {
            storage_dir_path: home_dir, // stores finished activities
            timeline_colors: vec![(183, 28, 28), (26, 35, 126), (0, 77, 64), (38, 50, 56)],
            deny_overlapping: true,
        }
    }
}

impl RTWConfig {
    /// Builder-style setter: consume `self` and return a copy with
    /// `deny_overlapping` replaced by `deny`.
    pub fn deny_overlapping(self, deny: bool) -> Self {
        RTWConfig {
            deny_overlapping: deny,
            ..self
        }
    }
}
/// Load configuration by layering sources on top of `default_config`.
///
/// Merge order (later sources override earlier ones):
/// 1. `default_config.storage_dir_path` as the default value,
/// 2. the built-in `DEFAULT_CONFIG` JSON,
/// 3. `<config_dir>/rtw/rtw_config.json` (if present),
/// 4. `<config_dir>/rtw_config.json` (if present).
///
/// NOTE(review): the fallback file (4) is merged after the subdirectory
/// file (3), so when both exist the fallback wins — confirm this
/// precedence is intended.
fn load_config_from_config_dir(
    config_dir: &PathBuf,
    default_config: RTWConfig,
) -> anyhow::Result<RTWConfig> {
    let mut settings = config::Config::default();
    let config_path = config_dir.join("rtw").join("rtw_config.json");
    let config_path_fallback = config_dir.join("rtw_config.json");
    settings
        .set_default(
            "storage_dir_path",
            default_config.storage_dir_path.to_str().unwrap(),
        )?
        .merge(config::File::from_str(DEFAULT_CONFIG, FileFormat::Json))?
        // `.required(false)`: missing user config files are not an error.
        .merge(config::File::with_name(config_path.to_str().unwrap()).required(false))?
        .merge(config::File::with_name(config_path_fallback.to_str().unwrap()).required(false))?;
    let rtw_config: RTWConfig = settings.try_into()?;
    Ok(rtw_config)
}
/// Load the user configuration, falling back to built-in defaults when
/// no platform config directory can be determined.
pub fn load_config() -> anyhow::Result<RTWConfig> {
    let defaults = RTWConfig::default();
    if let Some(config_dir) = dirs_next::config_dir() {
        load_config_from_config_dir(&config_dir, defaults)
    } else {
        Ok(defaults)
    }
}
#[cfg(test)]
mod tests {
    use crate::rtw_config::{load_config_from_config_dir, RTWConfig};
    use std::fs;
    use std::fs::File;
    use std::io::Write;
    use std::path::PathBuf;
    use std::str::FromStr;
    use tempfile::tempdir;
    #[test]
    // make sure the config file in `example` folder is valid
    fn example_config_valid() {
        let example_config = PathBuf::from_str("example/rtw_config.json").unwrap();
        let reader = File::open(example_config);
        let config: serde_json::Result<RTWConfig> = serde_json::from_reader(reader.unwrap());
        assert!(config.is_ok())
    }
    #[test]
    // An empty config dir must fall back to the provided defaults.
    fn test_config_not_found_in_config_dir() {
        let test_config_dir = tempdir().expect("could not create temp directory");
        let test_dir_path = test_config_dir.path().to_path_buf();
        let config = load_config_from_config_dir(&test_dir_path, RTWConfig::default());
        assert_eq!(config.unwrap(), RTWConfig::default())
    }
    #[test]
    // .config/rtw_config.json
    // A config file directly in the config dir overrides defaults.
    fn test_config_found_in_config_dir() -> anyhow::Result<()> {
        let expected = PathBuf::from_str("/expected").unwrap();
        let test_config_dir = tempdir().expect("could not create temp directory");
        let mut tmp_config = File::create(test_config_dir.path().join("rtw_config.json"))?;
        writeln!(tmp_config, "{{\n\"storage_dir_path\": \"/expected\"\n}}")?;
        let config = load_config_from_config_dir(
            &test_config_dir.path().to_path_buf(),
            RTWConfig::default(),
        );
        assert_eq!(config.unwrap().storage_dir_path, expected);
        Ok(())
    }
    #[test]
    // .config/rtw/rtw_config.json
    // A config file in the `rtw` sub-directory also overrides defaults.
    fn test_config_found_in_sub_config_dir() -> anyhow::Result<()> {
        let expected = PathBuf::from_str("/expected").unwrap();
        let test_config_dir = tempdir().expect("could not create temp directory");
        let test_config_sub_dir = test_config_dir.path().join("rtw");
        fs::create_dir(test_config_sub_dir.clone()).expect("could not create temp/rtw directory");
        let mut tmp_config = File::create(test_config_sub_dir.join("rtw_config.json"))?;
        writeln!(tmp_config, "{{\n\"storage_dir_path\": \"/expected\"\n}}")?;
        let config = load_config_from_config_dir(
            &test_config_dir.path().to_path_buf(),
            RTWConfig::default(),
        );
        assert_eq!(config.unwrap().storage_dir_path, expected);
        Ok(())
    }
}
<file_sep># RTW Shell Completion
supported shells: bash, zsh, fish, powershell, elvish
Write completion file for `<shell>` to stdout:
```
rtw completion <shell>
```
## oh-my-zsh
```
.oh-my-zsh/custom/plugins/rtw
├── _rtw
└── rtw.plugin.zsh
```
```
mkdir -p ~/.oh-my-zsh/custom/plugins/rtw
rtw completion zsh > ~/.oh-my-zsh/custom/plugins/rtw/_rtw
echo "#rtw completion plugin" > ~/.oh-my-zsh/custom/plugins/rtw/rtw.plugin.zsh
```
Add `rtw` to `plugins` in `.zshrc`:
```
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(git rtw)
```
<file_sep>//! Clock impl using chrono.
use crate::rtw_core::clock::{Clock, Time};
use crate::rtw_core::datetimew::DateTimeW;
use chrono::{Date, Datelike, Duration, Local};
/// `Clock` implementation backed by `chrono`'s local time.
pub struct ChronoClock {}
impl Clock for ChronoClock {
fn get_time(&self) -> DateTimeW {
chrono::Local::now().into()
}
fn date_time(&self, time: Time) -> DateTimeW {
match time {
Time::Now => self.get_time(),
Time::DateTime(abs_time) => abs_time,
}
}
fn today_range(&self) -> (DateTimeW, DateTimeW) {
let today = chrono::Local::today();
self.day_range(today)
}
fn yesterday_range(&self) -> (DateTimeW, DateTimeW) {
let today = chrono::Local::today();
let yesterday = today - chrono::Duration::days(1); // so proud
self.day_range(yesterday)
}
fn last_week_range(&self) -> (DateTimeW, DateTimeW) {
let today = chrono::Local::today();
let weekday = today.weekday();
let this_week_monday = today - Duration::days(weekday.num_days_from_monday() as i64);
let last_week_monday = this_week_monday - Duration::days(7);
let last_week_sunday = this_week_monday - Duration::days(1);
self.days_range(last_week_monday, last_week_sunday)
}
fn this_week_range(&self) -> (DateTimeW, DateTimeW) {
let today = chrono::Local::today();
let weekday = today.weekday();
let this_week_monday = today - Duration::days(weekday.num_days_from_monday() as i64);
let this_week_sunday = this_week_monday + Duration::days(6);
self.days_range(this_week_monday, this_week_sunday)
}
}
impl ChronoClock {
    /// Inclusive range covering a single day: 00:00:00 through 23:59:59.
    fn day_range(&self, day: Date<Local>) -> (DateTimeW, DateTimeW) {
        self.days_range(day, day)
    }
    /// Inclusive range from `day_start` 00:00:00 through `day_end` 23:59:59.
    fn days_range(&self, day_start: Date<Local>, day_end: Date<Local>) -> (DateTimeW, DateTimeW) {
        let range_start = day_start.and_hms(0, 0, 0);
        let range_end = day_end.and_hms(23, 59, 59);
        (range_start.into(), range_end.into())
    }
}
<file_sep>//! Clock abstraction
use crate::rtw_core::datetimew::DateTimeW;
/// Time (absolute or relative)
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Time {
    /// Now, can be converted to `DateTimeW` using `Clock.date_time`
    Now,
    /// An absolute local instant.
    DateTime(DateTimeW),
}
/// Clock Abstraction
///
/// All ranges below are inclusive on both ends.
pub trait Clock {
    /// Get current local time
    fn get_time(&self) -> DateTimeW;
    /// Convert a `Time` to absolute time
    ///
    /// `clock.date_time(Time::Now)` equals approximately clock.get_time();
    fn date_time(&self, time: Time) -> DateTimeW;
    /// Get time range for today
    ///
    /// today: 00:00:00 - 23:59:59
    fn today_range(&self) -> (DateTimeW, DateTimeW);
    /// Get time range for yesterday
    ///
    /// yesterday: 00:00:00 - 23:59:59
    fn yesterday_range(&self) -> (DateTimeW, DateTimeW);
    /// Get time range for last week
    ///
    /// last week (ISO 8601, week start on monday)
    ///
    /// last week: monday: 00:00:00 - sunday: 23:59:59
    fn last_week_range(&self) -> (DateTimeW, DateTimeW);
    /// Get time range for this week
    ///
    /// this week (ISO 8601, week start on monday)
    ///
    /// this week: monday: 00:00:00 - sunday: 23:59:59
    fn this_week_range(&self) -> (DateTimeW, DateTimeW);
}
<file_sep>//! CLI parsing helpers and clap App.
use clap::{App, Arg, ArgMatches, SubCommand};
use crate::rtw_core::clock::{Clock, Time};
use crate::rtw_core::datetimew::DateTimeW;
use crate::rtw_core::{ActivityId, Description, Tags};
use crate::time_tools::TimeTools;
use std::str::FromStr;
// 09:00 foo -> (09:00, foo)
// foo -> (Now, foo)
// last friday 8pm foo -> (last friday 8pm, foo)
/// Split `tokens` into a leading time clue and the remaining tags.
///
/// The longest prefix recognized as a time clue wins (iteration is from
/// the full token list down to the empty prefix); when no prefix parses,
/// all tokens are treated as tags starting now.
fn split_time_clue_from_tags(tokens: &[String], clock: &dyn Clock) -> (Time, Tags) {
    for at in (0..=tokens.len()).rev() {
        let (possibly_time_clue, possibly_tags) = tokens.split_at(at);
        let possibly_time_clue_joined: &str = &possibly_time_clue.join(" ");
        if TimeTools::is_time(possibly_time_clue_joined) {
            // Robustness fix: the original `unwrap()` assumed `is_time`
            // guarantees `time_from_str` succeeds; if the two ever
            // disagree we now skip this candidate instead of panicking.
            if let Ok(time) = TimeTools::time_from_str(possibly_time_clue_joined, clock) {
                return (time, possibly_tags.to_vec());
            }
        }
    }
    (Time::Now, tokens.to_vec())
}
// "09:00 - 10:00 foo" -> (09:00, 10:00, foo)
fn split_time_range_from_tags(
tokens: &[String],
clock: &dyn Clock,
) -> anyhow::Result<(Time, Time, Tags)> {
let separator = "-";
let sp = tokens.splitn(2, |e| e == separator);
let sp: Vec<&[String]> = sp.collect();
match sp.as_slice() {
[range_start, range_end_and_tags] => {
let range_start_maybe = TimeTools::time_from_str(&range_start.join(" "), clock);
let (range_end, activity_tags) = split_time_clue_from_tags(&range_end_and_tags, clock);
match range_start_maybe {
Ok(range_start) => Ok((range_start, range_end, activity_tags)),
Err(e) => Err(anyhow::anyhow!(e)),
}
}
_ => Err(anyhow::anyhow!(
"missing ' - ' between range start and range end? "
)),
}
}
// 09:00 - 10:00 -> (09:00, 10:00)
// 09:00 - -> (09:00, Now)
/// Parse a `start - [end]` token list into (start, end).
///
/// A missing end means "now"; a missing separator or an unparseable
/// bound is an error.
fn split_time_range(tokens: &[String], clock: &dyn Clock) -> anyhow::Result<(Time, Time)> {
    let parts: Vec<&[String]> = tokens.splitn(2, |t| t == "-").collect();
    if let [start_tokens, end_tokens] = parts.as_slice() {
        let range_start_maybe = TimeTools::time_from_str(&start_tokens.join(" "), clock);
        let range_end_maybe = if end_tokens.is_empty() {
            // Empty right-hand side defaults to the current time.
            Ok(Time::Now)
        } else {
            TimeTools::time_from_str(&end_tokens.join(" "), clock)
        };
        if let (Ok(range_start), Ok(range_end)) = (range_start_maybe, range_end_maybe) {
            Ok((range_start, range_end))
        } else {
            Err(anyhow::anyhow!("invalid range"))
        }
    } else {
        Err(anyhow::anyhow!(
            "missing ' - ' between range start and range end? "
        ))
    }
}
pub fn get_app() -> App<'static, 'static> {
App::new(crate_name!())
.version(crate_version!())
.author("PicoJr")
.about("rust time tracking CLI")
.arg(
Arg::with_name("directory")
.short("d")
.long("dir")
.value_name("DIR")
.required(false)
.help("storage directory")
.hidden(true) // only useful for testing
.takes_value(true),
)
.arg(
Arg::with_name("default")
.long("default")
.required(false)
.help("use default config")
.hidden(true), // only useful for testing
)
.arg(
Arg::with_name("overlap")
.long("overlap")
.required(false)
.conflicts_with("default")
.conflicts_with("no_overlap")
.help("allow overlapping activities"),
)
.arg(
Arg::with_name("no_overlap")
.long("no_overlap")
.required(false)
.conflicts_with("overlap")
.conflicts_with("default")
.help("disallow overlapping activities"),
)
.arg(
Arg::with_name("dry-run")
.short("n")
.long("dry")
.required(false)
.help("dry run: don't write anything to the filesystem"),
)
.subcommand(
SubCommand::with_name("start")
.about("Start new activity")
.arg(
Arg::with_name("tokens")
.multiple(true)
.required(true)
.help(concat!(
"optional time clue followed by at least 1 tag\n",
"e.g '4 min ago foo' or '09:00 foo' or 'foo' "
)),
)
.arg(
Arg::with_name("description")
.short("d")
.long("description")
.takes_value(true)
.help("long activity description"),
),
)
.subcommand(
SubCommand::with_name("track")
.about("Track a finished activity")
.arg(
Arg::with_name("tokens")
.multiple(true)
.required(true)
.help(concat!(
"interval time clue followed by at least 1 tag\n",
"start - end tags...\n",
"e.g '09:00 - 10:00 foo' "
)),
)
.arg(
Arg::with_name("description")
.short("d")
.long("description")
.takes_value(true)
.help("long activity description"),
),
)
.subcommand(
SubCommand::with_name("stop")
.about("Stop activity")
.arg(
Arg::with_name("time")
.multiple(true)
.required(false)
.help(concat!(
"optional time clue e.g. 4min ago\n",
"current time is used when omitted"
)),
)
.arg(
Arg::with_name("id")
.long("id")
.takes_value(true)
.help(concat!(
"optional activity id\n",
"current activity is stopped when omitted"
)),
),
)
.subcommand(
SubCommand::with_name("summary")
.about("Display finished activities")
.arg(
Arg::with_name("tokens")
.multiple(true)
.required(false)
.conflicts_with_all(&["yesterday", "lastweek", "week"])
.help(concat!(
"optional interval time clue\n",
"start - end\n",
"e.g '09:00 - 10:00' "
)),
)
.arg(
Arg::with_name("yesterday")
.long("yesterday")
.help("activities done yesterday"),
)
.arg(
Arg::with_name("lastweek")
.long("lastweek")
.help("activities done last week"),
)
.arg(
Arg::with_name("week")
.long("week")
.help("activities done this week"),
)
.arg(
Arg::with_name("id")
.long("id")
.help("display activities id"),
)
.arg(
Arg::with_name("description")
.short("d")
.long("description")
.help("display activities descriptions"),
)
.arg(
Arg::with_name("report")
.short("r")
.long("report")
.help("sum up activities with same tag together"),
),
)
.subcommand(
SubCommand::with_name("dump")
.about("Dump finished activities to stdout in iCalendar format")
.after_help(concat!(
"examples:\n",
"rtw dump > today.ics\n",
"rtw dump --lastweek > lastweek.ics\n",
"rtw dump last friday - now > recent.ics\n"
))
.arg(
Arg::with_name("tokens")
.multiple(true)
.required(false)
.conflicts_with_all(&["yesterday", "lastweek", "week"])
.help(concat!(
"optional interval time clue\n",
"start - end\n",
"e.g '09:00 - 10:00' "
)),
)
.arg(
Arg::with_name("yesterday")
.long("yesterday")
.help("activities done yesterday"),
)
.arg(
Arg::with_name("lastweek")
.long("lastweek")
.help("activities done last week"),
)
.arg(
Arg::with_name("week")
.long("week")
.help("activities done this week"),
),
)
.subcommand(
SubCommand::with_name("continue")
.about("Continue a finished activity")
.arg(
Arg::with_name("id")
.required(false)
.help("activity id (when id is not provided continue last activity)"),
),
)
.subcommand(SubCommand::with_name("day").about("Display the current day as a timeline"))
.subcommand(SubCommand::with_name("week").about("Display the current week as a timeline"))
.subcommand(
SubCommand::with_name("timeline")
.about("Display finished activities as a timeline")
.arg(
Arg::with_name("tokens")
.multiple(true)
.required(false)
.help(concat!(
"optional interval time clue\n",
"start - end\n",
"e.g 'last monday - now' "
)),
),
)
.subcommand(
SubCommand::with_name("delete")
.about("Delete activity")
.arg(Arg::with_name("id").required(true).help("activity id")),
)
.subcommand(
SubCommand::with_name("cancel")
.about("cancel current activity")
.arg(
Arg::with_name("id")
.long("id")
.takes_value(true)
.help(concat!(
"optional activity id\n",
"current activity is stopped when omitted"
)),
),
)
.subcommand(
SubCommand::with_name("completion")
.about("generate completion file")
.arg(
Arg::with_name("shell")
.possible_values(&["bash", "zsh", "fish", "powershell", "elvish"])
.takes_value(true)
.required(true),
),
)
}
/// Extract (start time, tags, description) from `start` subcommand matches.
pub fn parse_start_args(
    start_m: &ArgMatches,
    clock: &dyn Clock,
) -> anyhow::Result<(Time, Tags, Option<Description>)> {
    let description = start_m.value_of("description").map(String::from);
    match start_m.values_of("tokens") {
        Some(raw_tokens) => {
            let owned_tokens: Tags = raw_tokens.map(String::from).collect();
            let (start_time, tags) = split_time_clue_from_tags(&owned_tokens, clock);
            if tags.is_empty() {
                Err(anyhow::anyhow!("no tags provided"))
            } else {
                Ok((start_time, tags, description))
            }
        }
        // clap marks "tokens" as required, so this branch should be unreachable.
        None => Err(anyhow::anyhow!("neither time clue nor tags provided")),
    }
}
/// Extract (start, end, tags, description) from `track` subcommand matches.
pub fn parse_track_args(
    track_m: &ArgMatches,
    clock: &dyn Clock,
) -> anyhow::Result<(Time, Time, Tags, Option<Description>)> {
    let description = track_m.value_of("description").map(String::from);
    // "tokens" is required by clap, hence the expect.
    let owned_tokens: Tags = track_m
        .values_of("tokens")
        .expect("start time, end time and at least 1 tag required")
        .map(String::from)
        .collect();
    let (range_start, range_end, activity_tags) =
        split_time_range_from_tags(&owned_tokens, clock)?;
    Ok((range_start, range_end, activity_tags, description))
}
/// Extract (stop time, optional activity id) from `stop` subcommand matches.
pub fn parse_stop_args(
    stop_m: &ArgMatches,
    clock: &dyn Clock,
) -> anyhow::Result<(Time, Option<ActivityId>)> {
    let stopped_id_maybe = stop_m
        .value_of("id")
        .map(usize::from_str)
        .transpose()?;
    match stop_m.values_of("time") {
        // A time clue may span several CLI tokens ("5 min ago"): re-join them.
        Some(raw_tokens) => {
            let joined = raw_tokens.collect::<Vec<&str>>().join(" ");
            let stop_time = TimeTools::time_from_str(&joined, clock)?;
            Ok((stop_time, stopped_id_maybe))
        }
        // No time clue: stop at the current time.
        None => Ok((Time::Now, stopped_id_maybe)),
    }
}
/// Extract the optional activity id from `continue` subcommand matches.
pub fn parse_continue_args(continue_m: &ArgMatches) -> anyhow::Result<Option<ActivityId>> {
    match continue_m.value_of("id") {
        None => Ok(None),
        Some(id_str) => Ok(Some(usize::from_str(id_str)?)),
    }
}
pub fn parse_cancel_args(cancel_m: &ArgMatches) -> anyhow::Result<Option<ActivityId>> {
let cancelled_id_maybe = cancel_m
.value_of("id")
.map(|id_str| usize::from_str(id_str))
.transpose()?;
Ok(cancelled_id_maybe)
}
/// Extract (datetime range, display_id, display_description, report)
/// from `summary` subcommand matches.
pub fn parse_summary_args(
    summary_m: &ArgMatches,
    clock: &dyn Clock,
) -> anyhow::Result<((DateTimeW, DateTimeW), bool, bool, bool)> {
    let display_id = summary_m.is_present("id");
    let report = summary_m.is_present("report");
    let display_description = summary_m.is_present("description");
    // An explicit "start - end" range takes priority over the named flags
    // (clap enforces the conflict anyway).
    if let Some(raw_tokens) = summary_m.values_of("tokens") {
        let owned_tokens: Vec<String> = raw_tokens.map(String::from).collect();
        return match split_time_range(&owned_tokens, clock) {
            Ok((range_start, range_end)) => {
                let resolved = (clock.date_time(range_start), clock.date_time(range_end));
                Ok((resolved, display_id, display_description, report))
            }
            Err(e) => Err(anyhow::anyhow!(e)),
        };
    }
    // Named ranges, falling back to "today".
    let range = if summary_m.is_present("yesterday") {
        clock.yesterday_range()
    } else if summary_m.is_present("lastweek") {
        clock.last_week_range()
    } else if summary_m.is_present("week") {
        clock.this_week_range()
    } else {
        clock.today_range()
    };
    Ok((range, display_id, display_description, report))
}
/// Extract (datetime range, display_id) from `timeline` subcommand matches.
pub fn parse_timeline_args(
    timeline_m: &ArgMatches,
    clock: &dyn Clock,
) -> anyhow::Result<((DateTimeW, DateTimeW), bool)> {
    // NOTE(review): the timeline subcommand declares no "id" arg in `get_app`,
    // so this is always false today — kept for signature stability.
    let display_id = timeline_m.is_present("id");
    match timeline_m.values_of("tokens") {
        None => Ok((clock.today_range(), display_id)),
        Some(raw_tokens) => {
            let owned_tokens: Vec<String> = raw_tokens.map(String::from).collect();
            match split_time_range(&owned_tokens, clock) {
                Ok((range_start, range_end)) => {
                    let resolved = (clock.date_time(range_start), clock.date_time(range_end));
                    Ok((resolved, display_id))
                }
                Err(e) => Err(anyhow::anyhow!(e)),
            }
        }
    }
}
/// Extract the activity id from `delete` subcommand matches.
pub fn parse_delete_args(delete_m: &ArgMatches) -> anyhow::Result<ActivityId> {
    // "id" is declared required by clap; both a missing value and an
    // unparseable one collapse into the same error, as before.
    delete_m
        .value_of("id")
        .and_then(|id_str| usize::from_str(id_str).ok())
        .ok_or_else(|| anyhow::anyhow!("could not parse id"))
}
/// Map the `completion <shell>` argument to a `clap::Shell` variant.
pub fn parse_completion_args(completion_m: &ArgMatches) -> anyhow::Result<clap::Shell> {
    match completion_m.value_of("shell") {
        Some("bash") => Ok(clap::Shell::Bash),
        Some("zsh") => Ok(clap::Shell::Zsh),
        Some("fish") => Ok(clap::Shell::Fish),
        Some("powershell") => Ok(clap::Shell::PowerShell),
        Some("elvish") => Ok(clap::Shell::Elvish),
        // Both cases below are unreachable: clap requires the arg and
        // restricts it to the possible_values list.
        None => Err(anyhow::anyhow!("missing shell")),
        _ => Err(anyhow::anyhow!("invalid shell")),
    }
}
#[cfg(test)]
mod tests {
    use crate::chrono_clock::ChronoClock;
    use crate::cli_helper::{
        split_time_clue_from_tags, split_time_range, split_time_range_from_tags,
    };
    use crate::rtw_core::clock::Time;
    use crate::rtw_core::Tags;
    use crate::time_tools::TimeTools;

    /// Build an owned token list from string literals.
    fn tokens_of(words: &[&str]) -> Tags {
        words.iter().map(|w| w.to_string()).collect()
    }

    #[test]
    // rtw start
    fn test_split_time_clue_from_tags_0_0() {
        let clock = ChronoClock {};
        let (time, tags) = split_time_clue_from_tags(&tokens_of(&[]), &clock);
        assert_eq!(time, Time::Now);
        assert!(tags.is_empty());
    }

    #[test]
    // rtw start foo
    fn test_split_time_clue_from_tags_0_1() {
        let clock = ChronoClock {};
        let input = tokens_of(&["foo"]);
        let (time, tags) = split_time_clue_from_tags(&input, &clock);
        assert_eq!(time, Time::Now);
        assert_eq!(tags, input);
    }

    #[test]
    // rtw start foo bar
    fn test_split_time_clue_from_tags_0_2() {
        let clock = ChronoClock {};
        let input = tokens_of(&["foo", "bar"]);
        let (time, tags) = split_time_clue_from_tags(&input, &clock);
        assert_eq!(time, Time::Now);
        assert_eq!(tags, input);
    }

    #[test]
    // rtw start 1 h ago
    fn test_split_time_clue_from_tags_3_0() {
        let clock = ChronoClock {};
        let input = tokens_of(&["1", "h", "ago"]);
        let (time, tags) = split_time_clue_from_tags(&input, &clock);
        assert_ne!(time, Time::Now);
        assert!(tags.is_empty());
    }

    #[test]
    // rtw start 1 h ago foo
    fn test_split_time_clue_from_tags_3_1() {
        let clock = ChronoClock {};
        let input = tokens_of(&["1", "h", "ago", "foo"]);
        let (time, tags) = split_time_clue_from_tags(&input, &clock);
        assert_ne!(time, Time::Now);
        assert_eq!(tags, tokens_of(&["foo"]));
    }

    #[test]
    // rtw track 09:00 - 10:00 foo
    fn test_split_time_range_from_tags_1_1_1() {
        let clock = ChronoClock {};
        let input = tokens_of(&["09:00", "-", "10:00", "foo"]);
        assert!(split_time_range_from_tags(&input, &clock).is_ok());
    }

    #[test]
    // rtw summary 09:00 - 10:00
    fn test_split_range_1_1() {
        let clock = ChronoClock {};
        let input = tokens_of(&["09:00", "-", "10:00"]);
        let range = split_time_range(&input, &clock);
        assert!(range.is_ok());
        let (range_start, range_end) = range.unwrap();
        assert_eq!(
            range_start,
            TimeTools::time_from_str("09:00", &clock).unwrap()
        );
        assert_eq!(
            range_end,
            TimeTools::time_from_str("10:00", &clock).unwrap()
        );
    }

    #[test]
    // rtw summary 09:00 -
    fn test_split_range_1_0() {
        let clock = ChronoClock {};
        let input = tokens_of(&["09:00", "-"]);
        let range = split_time_range(&input, &clock);
        assert!(range.is_ok());
        assert_eq!(range.unwrap().1, Time::Now)
    }
}
<file_sep># Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2.2.0](https://crates.io/crates/rtw/2.2.0) Unreleased
* Add `<id>` optional parameter to `continue`.
* Add tip for using `--overlap` when activities are overlapping.
* Clean timeline legend.
* Use `dirs-next` instead of unmaintained `dirs`.
* Bump dependencies.
## [2.1.0](https://crates.io/crates/rtw/2.1.0) Dec 8, 2020
* Add `--report` option to summary command.
## [2.0.1](https://crates.io/crates/rtw/2.0.1) Nov 3, 2020
* Fix CLI output for Windows 10 cf [#43](https://github.com/PicoJr/rtw/pull/43) thanks [ythri](https://github.com/ythri)
## [2.0.0](https://crates.io/crates/rtw/2.0.0) Jul 30, 2020
* Stabilize multiple ongoing activities
* Stabilize long descriptions
## 2.0.0-rc1 (not released on crates.io) Jul 20, 2020
* Timeline now displays ongoing activities.
## [2.0.0-beta](https://crates.io/crates/rtw/2.0.0-beta) Jul 16, 2020
This version is mostly backward compatible with previous `rtw` data,
please discard ongoing activities (remove `~/.rtw.json`) before using this version.
However previous versions of `rtw` will not work on data generated by this version.
* Support multiple ongoing activities.
* Timeline now supports overlapping activities (experimental).
* `stop` and `cancel` now have a `--id` optional parameter in order to disambiguate multiple ongoing activities.
* Add `deny_overlapping` option to `rtw` config.
* the json containing finished activities now also contains the `rtw` version.
* add `-d` `--description` option for `start`, `track` and `summary` cf [#40](https://github.com/PicoJr/rtw/issues/40).
* when provided, descriptions are used when exporting to calendar
## [2.0.0-alpha.1](https://crates.io/crates/rtw/2.0.0-alpha.1) Jul 12, 2020
* bump `htp` to 0.2.1 (fix `next <weekday>`)
## [2.0.0-alpha](https://crates.io/crates/rtw/2.0.0-alpha) Jul 5, 2020
* Replace `chrono-english` with `htp`.
* Fix [#37](https://github.com/PicoJr/rtw/issues/37)
## [1.5.0](https://crates.io/crates/rtw/1.5.0) Jun 21, 2020
* add `completion <shell>` command.
`rtw completion <shell>` generates completion file for `<shell>`
## [1.4.1](https://crates.io/crates/rtw/1.4.1) Jun 19, 2020
* Fix timeline crash when activity spans over several days (#33)
## [1.4.0](https://crates.io/crates/rtw/1.4.0) Jun 17, 2020
* Add `dump` subcommand, dumps finished activities to [ICalendar](https://en.wikipedia.org/wiki/ICalendar).
## [1.3.1](https://crates.io/crates/rtw/1.3.1) Jun 16, 2020
* Fix timeline crash when activity is too short to be displayed (#28)
## [1.3.0](https://crates.io/crates/rtw/1.3.0) Jun 13, 2020
* Add multiline timeline
## [1.2.2](https://crates.io/crates/rtw/1.2.2) Jun 09, 2020
* Add `-n` dry-run option.
## [1.2.1](https://crates.io/crates/rtw/1.2.1) Jun 07, 2020
* Add warning: CLI usage stable but not `lib.rs` content.
* Fix doc.rs build issue (restore `lib.rs`).
## [1.2.0](https://crates.io/crates/rtw/1.2.0) Jun 07, 2020
* add `cancel` subcommand.
* deny overlapping activities
* add `timeline` subcommand.
* timeline colors can be configured in `rtw_config.json`
* add `day` subcommand (display timeline for the current day)
* add `week` subcommand (display timeline for the current week)
## [1.1.0](https://crates.io/crates/rtw/1.1.0) Mar 22, 2020
### Added
* add config using [config-rs](https://docs.rs/crate/config/0.10.1).
### Changed
* activities title are no longer truncated in summary
### Github CI
* Add platforms: `macos-latest`, `windows-latest` (see [rust.yml](.github/workflows/rust.yml)).
## [1.0.0](https://crates.io/crates/rtw/1.0.0) Mar 16, 2020
### Added
* crate [chrono-english](https://docs.rs/chrono-english/) for time parsing see [commands](commands.md).
* more unit and integration tests
* `summary --week` option
* `summary range_start - range_end` syntax
### Fixed
* Duration display bug: 1h was displayed as `01:60:3600` instead of `01:00:00`
### Breaking API Changes
`rtw` now uses the crate [chrono-english](https://docs.rs/chrono-english/) for time parsing.
As a result `rtw` now support the following [formats](https://docs.rs/chrono-english/#supported-formats) when supplying time hints.
The following syntax are not supported anymore:
* `rtw start 4m foo`, use `rtw start 4m ago foo` instead.
* `rtw stop 4m`, use `rtw stop 4m ago` instead.
* `rtw track 2019-12-25T19:43:00 2019-12-25T19:45:00 write doc`, use `rtw track 2019-12-25T19:43:00 - 2019-12-25T19:45:00 write doc` instead
## [0.2.1](https://crates.io/crates/rtw/0.2.1) Mar 8, 2020
### Fixed
* fix cargo-audit warning on `quote:1.0.2` being yanked
### Removed
* ram-only implementations
## [0.2.0](https://crates.io/crates/rtw/0.2.0) Dec 31, 2019
### Added
* `track` command
* `delete` command
* `summary --id` option
* doc test
* `continue` command
* `CHANGELOG.md`
* `commands.md`
* `summary --lastweek` option
* github action
* badges
### Changed
* `AbsTime` renamed to `DateTimeW`
* `ActiveActivity` renamed to `OngoingActivity`
### Fixed
* `summary` output is now sorted by start date
* `tempfile` and `assert_cmd` no longer required for build
* CLI version now matches `Cargo.toml` version
## [0.1.1](https://crates.io/crates/rtw/0.1.1) Dec 26, 2019
### Added
* repository url in `Cargo.toml`
## [0.1.0](https://crates.io/crates/rtw/0.1.0) Dec 26, 2019
### Added
* `start` command
* `stop` command
* `summary` command
<file_sep>//! Time parsing utils.
use crate::rtw_core::clock::{Clock, Time};
use anyhow::anyhow;
use chrono::Local;
use htp::parse;
pub struct TimeTools {}
impl TimeTools {
    /// Returns true when `s` parses as a time expression.
    ///
    /// NOTE(review): this parses relative to `Local::now()`, not a `Clock` —
    /// the signature takes no clock, so it cannot be mocked in tests.
    pub fn is_time(s: &str) -> bool {
        parse(s, Local::now()).is_ok()
    }

    /// Parse `s` into a `Time`, resolving relative expressions
    /// ("5 min ago", "last friday") against the given clock.
    pub fn time_from_str(s: &str, clock: &dyn Clock) -> anyhow::Result<Time> {
        parse(s, clock.get_time().into())
            .map(|parsed| Time::DateTime(parsed.into()))
            .map_err(|e| anyhow!(e.to_string()))
    }
}
<file_sep>#[cfg(test)]
mod tests {
use assert_cmd::Command;
use tempfile::tempdir;
const NO_ACTIVE_TIME_TRACKING: &str = "There is no active time tracking.\n";
const NO_FILTERED_DATA_FOUND: &str = "No filtered data found.\n";
#[test]
fn no_args() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.assert()
.success()
.stdout(NO_ACTIVE_TIME_TRACKING);
}
#[test]
fn summary_none() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("summary")
.assert()
.success()
.stdout(NO_FILTERED_DATA_FOUND);
}
#[test]
fn summary_none_with_id() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("summary")
.arg("--id")
.assert()
.success()
.stdout(NO_FILTERED_DATA_FOUND);
}
#[test]
fn summary_none_with_range() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("summary")
.arg("09:00")
.arg("-")
.arg("10:00")
.assert()
.success()
.stdout(NO_FILTERED_DATA_FOUND);
}
#[test]
fn summary_something_with_range() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("summary")
.arg("08:00")
.arg("-")
.arg("11:00")
.assert()
.success();
}
#[test]
fn summary_something_with_report() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("10:00")
.arg("-")
.arg("11:00")
.arg("foo")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("summary")
.arg("--report")
.arg("08:00")
.arg("-")
.arg("12:00")
.assert()
.success()
.stdout(predicates::str::contains("foo 02:00:00 (2 segments)\n"));
}
#[test]
fn dump_ical_nothing() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("dump")
.assert()
.success();
}
#[test]
fn dump_ical_something() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("dump")
.assert()
.success()
.stdout(predicates::str::contains("SUMMARY:foo"));
}
#[test]
fn timeline_day_nothing() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("day")
.assert()
.success();
}
#[test]
fn timeline_week_nothing() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("week")
.assert()
.success();
}
#[test]
fn timeline_nothing() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("timeline")
.arg("09:00")
.arg("-")
.arg("10:00")
.assert()
.success();
}
#[test]
fn timeline_several_day_span() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("2019-12-24T19:43:00")
.arg("-")
.arg("2019-12-26T10:45:00")
.arg("christmas")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("timeline")
.arg("2019-12-20T10:43:00")
.arg("-")
.arg("2019-12-30T10:43:00")
.assert()
.success();
}
#[test]
fn continue_none() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("continue")
.assert()
.success()
.stdout("No activity to continue from.\n");
}
#[test]
fn continue_last() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("continue")
.assert()
.success()
.stdout("Tracking foo\n");
}
#[test]
fn continue_id_1() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("10:00")
.arg("-")
.arg("11:00")
.arg("unexpected")
.assert()
.success();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("continue")
.arg("1") // next to last
.assert()
.success()
.stdout("Tracking foo\n");
}
#[test]
fn delete_none() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("delete")
.arg("42")
.assert()
.success()
.stdout("No activity found for id 42.\n");
}
#[test]
fn delete_some() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("delete")
.arg("0")
.assert()
.success()
.stdout(predicates::str::contains("Deleted foo"));
}
#[test]
fn start_now() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("start")
.arg("foo")
.assert()
.success();
}
#[test]
fn start_then_stop() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("start")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Tracking foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("stop")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn start_then_cancel() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("start")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Tracking foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("cancel")
.assert()
.success()
.stdout(predicates::str::contains("Cancelled foo"));
}
#[test]
fn start_then_stop_then_delete() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("start")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Tracking foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("stop")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("delete")
.arg("0")
.assert()
.success()
.stdout(predicates::str::contains("Deleted foo"));
}
#[test]
fn track_date_missing_separator() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("2019-12-25T19:43:00")
.arg("2019-12-25T19:45:00")
.arg("foo")
.assert()
.failure();
}
#[test]
fn track_date() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("2019-12-25T19:43:00")
.arg("-")
.arg("2019-12-25T19:45:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn track_relative() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("10 min ago")
.arg("-")
.arg("5 min ago")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn track_relative_time() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn track_relative_time_am_pm() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09am")
.arg("-")
.arg("10am")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn track_relative_missing_end() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("10 min ago")
.arg("-")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
}
#[test]
fn track_relative_missing_start_and_end() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("-")
.arg("foo")
.assert()
.failure();
}
#[test]
fn track_overlap_not_allowed() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("--no_overlap") // deny overlapping
.arg("track")
.arg("09:30")
.arg("-")
.arg("11:00")
.arg("bar")
.assert()
.failure()
.stderr(predicates::str::contains("would overlap"));
}
#[test]
fn track_overlap_allowed() {
let test_dir = tempdir().expect("could not create temp directory");
let test_dir_path = test_dir.path().to_str().unwrap();
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("track")
.arg("09:00")
.arg("-")
.arg("10:00")
.arg("foo")
.assert()
.success()
.stdout(predicates::str::contains("Recorded foo"));
let mut cmd = Command::cargo_bin("rtw").unwrap();
cmd.arg("-d")
.arg(test_dir_path)
.arg("--overlap") // deny overlapping
.arg("track")
.arg("09:30")
.arg("-")
.arg("11:00")
.arg("bar")
.assert()
.success()
.stdout(predicates::str::contains("Recorded bar"));
}
#[test]
fn start_nothing_now() {
    // "start" without an activity name must fail.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "start"]).assert().failure();
}
#[test]
fn start_nothing_15min_ago() {
    // A start time without an activity name must fail.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "start", "15min", "ago"])
        .assert()
        .failure();
}
#[test]
fn start_foo_15min_ago() {
    // Starting an activity with a relative time in the past must succeed.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "start", "15min", "ago", "foo"])
        .assert()
        .success()
        .stdout(predicates::str::contains("Tracking foo"));
}
#[test]
fn start_foo_today_at_9() {
    // Starting an activity with a bare HH:MM time must succeed.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "start", "09:00", "foo"])
        .assert()
        .success()
        .stdout(predicates::str::contains("Tracking foo"));
}
#[test]
fn start_foo_on_datetime() {
    // Starting an activity with a full ISO-8601 datetime must succeed.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "start", "2019-12-24T19:43:00", "foo"])
        .assert()
        .success()
        .stdout(predicates::str::contains("Tracking foo"));
}
#[test]
fn stop_nothing_now() {
    // Stopping when nothing is tracked succeeds but reports no activity.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut rtw = Command::cargo_bin("rtw").unwrap();
    rtw.args(&["-d", temp_path, "stop"])
        .assert()
        .success()
        .stdout(NO_ACTIVE_TIME_TRACKING);
}
#[test]
fn stop_foo_5min_ago() {
    // Start an activity in the past, then stop it at a later past instant.
    let temp = tempdir().expect("could not create temp directory");
    let temp_path = temp.path().to_str().unwrap();
    let mut starter = Command::cargo_bin("rtw").unwrap();
    starter
        .args(&["-d", temp_path, "start", "10", "min", "ago", "foo"])
        .assert()
        .success()
        .stdout(predicates::str::contains("Tracking foo"));
    let mut stopper = Command::cargo_bin("rtw").unwrap();
    stopper
        .args(&["-d", temp_path, "stop", "5", "min", "ago"])
        .assert()
        .success()
        .stdout(predicates::str::contains("Recorded foo"));
}
}
<file_sep>//! Timeline display
use crate::rtw_core::activity::{Activity, OngoingActivity};
use crate::rtw_core::durationw::DurationW;
use crate::rtw_core::ActivityId;
use ansi_term::{Color, Style};
use anyhow::anyhow;
use chrono::{DateTime, Datelike, Duration, Local, Timelike};
use std::cmp::max;
use std::iter::FromIterator;
use tbl::{Block, Bound, RenderBlock, Renderer, TBLError};
/// Red, green and blue components of a 24-bit color.
type RGB = (u8, u8, u8);
/// Text to display in a timeline segment, plus its background color.
type Label = (String, RGB);
/// An activity together with its id in the finished-activities store.
type Interval = (ActivityId, Activity);
/// Fallback terminal width (columns) when the real size cannot be queried.
const DEFAULT_TERMINAL_SIZE: usize = 90;
/// Split `s` into chunks of exactly `size` characters.
///
/// Returns an empty vector when `size` is 0. Otherwise the result contains
/// every full chunk followed by one trailing chunk holding the remaining
/// characters padded with spaces to `size` (this trailing chunk is emitted
/// even when the remainder is empty, matching the renderer's expectations).
fn chunkify(s: &str, size: usize) -> Vec<String> {
    if size == 0 {
        return Vec::new();
    }
    let chars: Vec<char> = s.chars().collect();
    let mut result: Vec<String> = chars
        .chunks_exact(size)
        .map(|chunk| chunk.iter().collect())
        .collect();
    // Pad the (possibly empty) remainder with spaces up to `size`.
    let tail: String = chars
        .chunks_exact(size)
        .remainder()
        .iter()
        .cloned()
        .chain(std::iter::repeat(' '))
        .take(size)
        .collect();
    result.push(tail);
    result
}
/// Split an activity that crosses midnight into a same-day slice and a remainder.
///
/// Returns `(interval, None)` when start and stop fall on the same calendar
/// day; otherwise returns the first-day slice (start → 23:59:59.999) and
/// `Some(rest)`, where the rest starts at midnight of the next day and may
/// itself span several days (callers recurse via `split_interval`).
fn split_interval_if_needed(interval: &Interval) -> (Interval, Option<Interval>) {
    let (activity_id, activity) = interval;
    let start_time: DateTime<Local> = activity.get_start_time().into();
    let stop_time: DateTime<Local> = activity.get_stop_time().into();
    // Whole calendar days between start and stop (0 when the same day).
    let day_span: i32 = stop_time.num_days_from_ce() - start_time.num_days_from_ce();
    if day_span < 1 {
        (interval.clone(), None) // activity start time and stop time same day
    } else {
        // Last representable instant of the start day (millisecond precision).
        let same_day_midnight: DateTime<Local> = start_time.date().and_hms_milli(23, 59, 59, 999);
        // Paranoia in case 23:59:59:999 < start_time < midnight
        let (same_day_start, same_day_end) = if start_time < same_day_midnight {
            (start_time, same_day_midnight)
        } else {
            (same_day_midnight, start_time)
        };
        let same_day_to_midnight = OngoingActivity::new(
            same_day_start.into(),
            activity.get_tags(),
            activity.get_description(),
        )
        .into_activity(same_day_end.into())
        .unwrap(); // safe to unwrap thanks to previous test: same_day_start <= same_day_end
        // Midnight at the start of the following day: beginning of the remainder.
        let day_after: DateTime<Local> = start_time.date().and_hms(0, 0, 0) + Duration::days(1);
        let other_days = OngoingActivity::new(
            day_after.into(),
            activity.get_tags(),
            activity.get_description(),
        )
        .into_activity(stop_time.into())
        .unwrap(); // safe to unwrap because stop_time >= day_after
        (
            (*activity_id, same_day_to_midnight),
            Some((*activity_id, other_days)),
        )
    }
}
/// Split an activity into one interval per calendar day it covers.
fn split_interval(interval: &Interval) -> Vec<Interval> {
    // Iterative form: peel off the first day until the rest fits in one day.
    let mut parts: Vec<Interval> = Vec::new();
    let mut current = interval.clone();
    loop {
        match split_interval_if_needed(&current) {
            (head, None) => {
                parts.push(head);
                return parts;
            }
            (head, Some(tail)) => {
                parts.push(head);
                current = tail;
            }
        }
    }
}
/// Turn an abstract timeline block into its printable, colored form.
fn render(b: &Block<(String, (u8, u8, u8))>) -> RenderBlock {
    match b {
        Block::Space(width) => RenderBlock::Space(" ".repeat(*width)),
        Block::Segment(width, maybe_label) => {
            // Missing label: render an unlabeled black segment.
            let (text, (red, green, blue)) =
                maybe_label.clone().unwrap_or((String::default(), (0, 0, 0)));
            let painter = Style::new().on(Color::RGB(red, green, blue));
            let painted: Vec<String> = chunkify(&text, *width)
                .iter()
                .map(|chunk| painter.paint(chunk).to_string())
                .collect();
            RenderBlock::MultiLineBlock(painted)
        }
    }
}
/// Pick a display color for an activity, round-robin over the palette.
///
/// Returns black when the palette is empty. Bug fix: the original computed
/// `id % colors.len()` unconditionally, which panics (division by zero) for
/// an empty palette — its `None`/black fallback branch was unreachable.
fn color(id: ActivityId, colors: &[RGB]) -> RGB {
    if colors.is_empty() {
        (0, 0, 0) // fallback: black
    } else {
        colors[id % colors.len()]
    }
}
/// Map an activity to its [start, stop] position in seconds since midnight.
fn bounds(interval: &Interval) -> (f64, f64) {
    let (_, activity) = interval;
    let begin: DateTime<Local> = activity.get_start_time().into();
    let end: DateTime<Local> = activity.get_stop_time().into();
    let begin_seconds = f64::from(begin.num_seconds_from_midnight());
    let end_seconds = f64::from(end.num_seconds_from_midnight());
    (begin_seconds, end_seconds)
}
// label for activities: the activity title on the activity's palette color
fn label(interval: &Interval, colors: &[RGB]) -> Option<Label> {
    let (id, activity) = interval;
    let text = activity.get_title();
    Some((text, color(*id, colors)))
}
// label for legend: "HH:MM-HH:MM" time range on a black background
fn legend(interval: &Interval) -> Option<Label> {
    let (_, activity) = interval;
    let begin: DateTime<Local> = activity.get_start_time().into();
    let end: DateTime<Local> = activity.get_stop_time().into();
    let text = format!("{}-{}", begin.format("%H:%M"), end.format("%H:%M"));
    Some((text, (0, 0, 0)))
}
// day total activities duration: sum of all activity durations
fn day_total(activities: &[Interval]) -> Duration {
    let mut total = Duration::seconds(0);
    for (_, activity) in activities {
        let duration: Duration = activity.get_duration().into();
        total = total + duration;
    }
    total
}
// earliest and latest activity, in seconds since midnight
// (defaults: 0 and 86_400, i.e. the full day, when there are no activities)
fn day_bounds(activities: &[Interval]) -> Bound {
    let starts = activities.iter().map(|(_, a)| {
        let start: DateTime<Local> = a.get_start_time().into();
        start.num_seconds_from_midnight()
    });
    let stops = activities.iter().map(|(_, a)| {
        let stop: DateTime<Local> = a.get_stop_time().into();
        stop.num_seconds_from_midnight()
    });
    let earliest = starts.min().unwrap_or(0) as f64;
    let latest = stops.max().unwrap_or(86_400) as f64;
    (earliest, latest)
}
// min and max day (days-from-common-era), 0 when there are no activities
fn days(activities: &[Interval]) -> (i32, i32) {
    let first = activities
        .iter()
        .map(|(_, a)| {
            let start: DateTime<Local> = a.get_start_time().into();
            start.num_days_from_ce()
        })
        .min()
        .unwrap_or(0);
    let last = activities
        .iter()
        .map(|(_, a)| {
            let stop: DateTime<Local> = a.get_stop_time().into();
            stop.num_days_from_ce()
        })
        .max()
        .unwrap_or(0);
    (first, last)
}
pub(crate) fn render_days(activities: &[Interval], colors: &[RGB]) -> anyhow::Result<Vec<String>> {
let (width, _height) = term_size::dimensions().unwrap_or((DEFAULT_TERMINAL_SIZE, 0));
let (min_second, max_second) = day_bounds(activities);
let (min_day, max_day) = days(activities);
let mut rendered: Vec<String> = vec![];
for day in min_day..=max_day {
let day_activities: Vec<Interval> = activities
.iter()
.flat_map(|interval| split_interval(interval))
.filter(|(_, a)| {
let start_time: DateTime<Local> = a.get_start_time().into();
start_time.num_days_from_ce() == day
})
.collect();
let day_month = day_activities
.first()
.map(|(_, a)| {
let start_time: DateTime<Local> = a.get_start_time().into();
start_time.format("%d/%m").to_string()
})
.unwrap_or_else(|| "??/??".to_string());
let total: DurationW = DurationW::from(day_total(day_activities.as_slice()));
let total_string = total.to_string();
let right_padding = total_string.len() + 1; // +1 space
let available_length = max(0, width - right_padding as usize) as usize;
let data = Renderer::new(day_activities.as_slice(), &bounds, &|a| label(a, colors))
.with_renderer(&render)
.with_length(available_length)
.with_boundaries((min_second, max_second))
.render()
.map_err(|e| match e {
TBLError::NoBoundaries => anyhow!("failed to create timeline"),
TBLError::Intersection(left, right) => anyhow!(
"failed to create timeline: some activities are overlapping: {:?} intersects {:?}", left, right
),
})?;
let legend = Renderer::new(day_activities.as_slice(), &bounds, &legend)
.with_renderer(&render)
.with_length(available_length)
.with_boundaries((min_second, max_second))
.render()
.map_err(|e| match e {
TBLError::NoBoundaries => anyhow!("failed to create timeline"),
TBLError::Intersection(left, right) => anyhow!(
"failed to create timeline: some activities are overlapping: {:?} intersects {:?}", left, right
),
})?;
let timeline = legend.iter().zip(data.iter());
for (legend_timelines, data_timelines) in timeline {
for (j, line) in legend_timelines.iter().enumerate() {
if j == 0 {
rendered.push(format!("{}{:>8}", line, day_month));
} else {
rendered.push(format!("{}{:>8}", line, " ".to_string()));
}
}
for (j, line) in data_timelines.iter().enumerate() {
if j == 0 {
rendered.push(format!("{}{}", line, total_string));
} else {
rendered.push(format!("{}{:>8}", line, " ".to_string()));
}
}
}
}
Ok(rendered)
}
<file_sep>[](https://crates.io/crates/rtw)
[](https://docs.rs/rtw)
[](https://github.com/PicoJr/rtw/blob/master/LICENSE)
|Branch|Status|
|------|------|
|[master](https://github.com/PicoJr/rtw/tree/master)||
|[dev](https://github.com/PicoJr/rtw/tree/dev) ||
# RTW - Rust Time Watcher
Command-line interface (CLI) time tracker.
CLI usage is stable, underlying API is **not stable**.
> Note: This software is built specifically as a productivity tool for myself,
> not as a consumer resource. I cannot commit a large amount of time to maintaining this
> software but I'll do my best to provide support if something fails =).
This project is heavily inspired by [Timewarrior](https://github.com/GothenburgBitFactory/timewarrior).
> For a stable feature-rich CLI time tracker, please use Timewarrior: https://timewarrior.net/.
## Why another time tracker tool?
1. learn Rust
2. I once lost a month worth of data with another time tracker tool (database corruption)...never again!
## Install
Supported OS: Linux, MacOS, Windows
CI runs on `ubuntu-latest`, `macos-latest`, `windows-latest`.
Note: Windows support is only experimental. Some features may not be supported on Windows.
### Cargo
```
cargo install rtw
```
### Build From Source
rtw compiles with Rust 1.42.0 (stable) or newer.
Clone and build from source:
```
git clone https://github.com/PicoJr/rtw.git
cd rtw
cargo build --release
```
### From binaries (Linux only)
Download the corresponding archive from the [Release page](https://github.com/picojr/rtw/releases).
### Shell Completion (Bash, Zsh, Fish, Powershell, Elvish)
Please see [shell completion](shell-completion.md).
## Changelog
Please see the [CHANGELOG](CHANGELOG.md) for a release history.
## Basic Usage
### Start tracking an activity
Example:
```bash
rtw start "learn rust"
```
Example output:
```
Tracking learn rust
Started 2019-12-25T19:43:00
```
### Display current activity
``` bash
rtw
```
Example output:
```
Tracking learn rust
Total 01:15:00
```
### Stop current activity
```bash
rtw stop
```
Example output:
```
Recorded learn rust
Started 2019-12-25T19:43:00
Ended 2019-12-25T21:00:00
Total 01:17:000
```
### Display the day's activity summary
```bash
rtw summary
```
Example output:
```
read the doc 2019-12-25T11:49:30 2019-12-25T11:53:36 00:04:246
eat cookies 2019-12-25T12:08:49 2019-12-25T12:12:14 00:03:204
```
### Display a timeline for the day
```bash
rtw day
```
Example output (YMMV):

### More?
For further details see [Full Usage](commands.md).
## Configuration
RTW doesn't create the config file for you, but it looks for one in the following locations (in this order):
1. `$XDG_CONFIG_HOME/rtw/rtw_config.json`
2. `$HOME/.config/rtw/rtw_config.json`
3. `$XDG_CONFIG_HOME/.config/rtw_config.json`
4. `$HOME/.config/rtw_config.json`
see `example` folder for a default config file.
## Development
Development occurs on `dev`, releases are made on `master` branch.
## Implementation
RTW relies on json files for persistence.
Default location is the home (`~`) directory.
```
~/.rtw.json # stores current activity
~/.rtwh.json # stores finished activities
```
**there is currently no file locking mechanism**: running several `rtw` commands at the same time
may lead to undefined behavior.
## Similar Tools
* [timewarrior](https://github.com/GothenburgBitFactory/timewarrior)
* [watson](https://github.com/TailorDev/Watson)
* [jobrog](https://github.com/dfhoughton/jobrog)
* [doug](https://github.com/chdsbd/doug)
* ...
| 791584675afb772b7822c125645d073f6f950147 | [
"TOML",
"Rust",
"Markdown",
"Shell"
] | 25 | Rust | PicoJr/rtw | f803e85940eb2f68a445b3f1a5401f7a23e71f38 | e4182baaf95f3692207f4ff306e1a98e30303c6f |
refs/heads/main | <file_sep>const WebSocket = require('ws');
const P2P_PORT = process.env.P2P_PORT || 5001;
const peers = process.env.PEERS ? process.env.split(',') : [];
// Peer-to-peer server: wraps the blockchain instance shared with peers and
// keeps track of the websockets connected to this node.
class P2PServer{
    constructor(blockChainInstance){
        // Blockchain instance to synchronize across the network.
        this.blockChainInstance = blockChainInstance;
        // Connected peer sockets (an array, despite the singular name).
        this.socket = [];
    }
}
// Start a websocket server on P2P_PORT, register every incoming socket,
// then dial out to the configured peers.
// NOTE(review): uses `this` but is declared as a free function rather than
// a P2PServer method — presumably intended to live on the class; verify
// how it is called before relying on `this` here.
function listen(){
    const server = new WebSocket.Server({ port: P2P_PORT });
    server.on('connection', socket => this.connectSocket(socket));
    this.connectToPeers();
};
// Open a websocket connection to every configured peer and register each
// socket once its connection is established.
// Bug fix: the original ran an empty forEach and then referenced `peer`
// outside the callback (a ReferenceError); the connection logic belongs
// inside the loop body.
// NOTE(review): uses `this` but is a free function, not a P2PServer
// method — verify call sites bind `this` appropriately.
function connectToPeers(){
    peers.forEach(peer => {
        const socket = new WebSocket(peer);
        socket.on('open', () => this.connectSocket(socket));
    });
};
// Remember a newly connected peer socket and log the event.
function connectSocket(socket){
    console.log('Socket connected!');
    this.socket.push(socket);
}
module.exports = P2PServer; | 63c2afde80fa7eb0a027ec3e5b24e3bd427c1c35 | [
"JavaScript"
] | 1 | JavaScript | darlandc/blockchain-nodeJS | 3f86acd1fbd121f1341656e4a5f377cdd9e5bc54 | 93a29156e59f2f536a84cfd90e21ed9a2541c485 |
refs/heads/master | <file_sep>// var superagent = require('superagent')
// Session state for the currently logged-in partner (set by login()).
var AccountLoggedin;
var TypeLoggedin;
// Scratch holder for the account object returned by the server on login.
var tempaccount;
//var TypeReceiver;
//var AccountReceiver;
// Read the "Become a Partner" form and create the account on the server,
// displaying the server's reply in the status area.
// Bug fix: `url` was assigned without `var`, leaking an implicit global.
function create() {
    var name = document.getElementById('createName').value;
    var email = document.getElementById('createEmail').value;
    var password = document.getElementById('createPassword').value;
    var status = document.getElementById('status');
    var atype = document.getElementById('CreateAccountType').value;
    var url = '/account/create/' + name + '/' + email + '/' + password + '/' + atype;
    superagent
        .get(url)
        .end(function(err, res){
            if(err){
                console.log(err)
            }
            else{
                console.log(res.body);
                status.innerHTML = JSON.stringify(res.text)
            }
        })
}
// Deposit `amount` into the account matching `email` and show the reply.
// Bug fix: the original called `document.getElementByIdById`, which is not
// a function (TypeError at runtime); it must be `document.getElementById`.
function deposit() {
    var email = document.getElementById('depositEmail').value;
    var amount = document.getElementById('depositAmount').value;
    var status = document.getElementById('status');
    var url = '/account/deposit/' + email + '/' + amount;
    superagent
        .get(url)
        .end(function(err, res){
            if(err){
                console.log(err)
            }
            else{
                console.log(res.body);
                status.innerHTML = JSON.stringify(res.text)
            }
        });
}
// Withdraw the requested amount from the account matching the form's email,
// then display the server's response in the status area.
function withdraw() {
    var statusArea = document.getElementById('status');
    var endpoint = '/account/withdraw/'
        + document.getElementById('withdrawEmail').value
        + '/'
        + document.getElementById('withdrawAmount').value;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        statusArea.innerHTML = JSON.stringify(response.text);
    });
}
// Fetch and display every transaction recorded for the email in the form.
function transactions() {
    var statusArea = document.getElementById('status');
    var endpoint = '/account/transactions/'
        + document.getElementById('transactionsEmail').value;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        statusArea.innerHTML = JSON.stringify(response.text);
    });
}
// Authenticate against the server; on success switch the UI to the
// logged-in view and remember the account name in AccountLoggedin.
// Security fix: the server reply was parsed with eval(), which executes
// arbitrary JavaScript returned by the server; JSON.parse accepts data only.
function login() {
    var email = document.getElementById('loginEmail').value;
    var accountT = document.getElementById('AccountType').value;
    var password = document.getElementById('loginPassword').value;
    var status = document.getElementById('status');
    var url = '/account/login/' + email + '/' + accountT + '/' + password;
    superagent
        .get(url)
        .end(function(err, res){
            if(err){
                console.log(err)
            }
            else{
                console.log(res.body);
                try{
                    tempaccount = JSON.parse(res.text);
                    if (tempaccount.pendingtransactions.length == 0){
                        statusbar.innerHTML = "Welcome "+ tempaccount.name;
                    } else {
                        statusbar.innerHTML = "Welcome "+ tempaccount.name+" , you have a pending delivery, please check";
                    }
                    successlogin();
                    document.getElementById("status").innerText = "Welcome, please use navigation bar to choose functions, or enter food address to check your delivery."
                    AccountLoggedin = tempaccount.name;
                }
                catch(e){
                    // Non-JSON reply (e.g. a login-failure string): show it as-is.
                    statusbar.innerHTML = JSON.stringify(res.text);
                }
            }
        });
}
// Ask the server whether the logged-in partner has a pending delivery, and
// read the delivered food's name/origin from the Ethereum contract so it can
// be shown in the "FoodInfo" element.
// NOTE(review): the contract address and node URL are hard-coded — they only
// work against the local development chain; verify before deploying.
function CheckDelivery(){
    var url = '/account/checkdelivery/' + AccountLoggedin;
    superagent
    .get(url)
    .end(function(err, res){
        if(err){
            console.log(err)
        }
        else{
            console.log(res.body);
            // Connect to the local Ethereum node and use its first account.
            web3 = new Web3(new Web3.providers.HttpProvider("http://localhost:8545"));
            web3.eth.defaultAccount = web3.eth.accounts[0];
            var address1 = web3.eth.accounts[0];
            console.log(web3.eth.accounts[0]);
            // ABI of the food-tracking contract: FoodCreation(name, farm)
            // records a food item, getFood() returns (name, farm).
            var FoodContract = web3.eth.contract([
                {
                    "constant": false,
                    "inputs": [
                        {
                            "name": "_foodName",
                            "type": "string"
                        },
                        {
                            "name": "_originalFarm",
                            "type": "string"
                        }
                    ],
                    "name": "FoodCreation",
                    "outputs": [],
                    "payable": false,
                    "stateMutability": "nonpayable",
                    "type": "function"
                },
                {
                    "constant": true,
                    "inputs": [],
                    "name": "getFood",
                    "outputs": [
                        {
                            "name": "",
                            "type": "string"
                        },
                        {
                            "name": "",
                            "type": "string"
                        }
                    ],
                    "payable": false,
                    "stateMutability": "view",
                    "type": "function"
                }
            ]);
            var FoodTrans = FoodContract.at('0xfc6c5083d6d34b8877d90fd79f1ac96358c89ede');
            console.log(FoodTrans);
            // Asynchronously read the stored food and display "name from farm".
            FoodTrans.getFood(function(error, result) {
                if (!error) {
                    document.getElementById("FoodInfo").innerText = JSON.stringify(result[0]+" from "+result[1]);
                } else
                    console.log(error);
            });
            document.getElementById("status").innerText = JSON.stringify(res.text);
        }
    });
}
// Tell the server that the logged-in partner accepts the pending delivery.
function AcceptFood(){
    var endpoint = '/account/acceptfood/' + AccountLoggedin;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        document.getElementById("status").innerText = JSON.stringify(response.text);
    });
}
// Report a production step (input consumed, output produced) to the server
// for the logged-in partner, then show the server's reply.
function ProduceFood(){
    var endpoint = '/account/producefood/' + AccountLoggedin
        + '/' + document.getElementById('InputAmount').value
        + '/' + document.getElementById('OutputAmount').value;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        document.getElementById("status").innerText = JSON.stringify(response.text);
    });
}
// Retrieve and display the logged-in partner's transaction history.
function transhistory(){
    var endpoint = '/account/transhistory/' + AccountLoggedin;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        document.getElementById("status").innerText = JSON.stringify(response.text);
    });
}
// Fetch the whole server data store and dump it into the status area.
function allData() {
    var statusArea = document.getElementById('status');
    superagent.get('/account/all').end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        statusArea.innerHTML = JSON.stringify(response.text);
    });
}
// Transfer food from the logged-in partner to another partner via the
// server, then record the product's name and origin on the Ethereum
// contract (FoodCreation) for later traceability.
// NOTE(review): the contract address and node URL are hard-coded for the
// local development chain; verify before deploying.
function TransferFood(){
    var PName = document.getElementById("ProductName").value;
    var AccountReceiver = document.getElementById("ReceiverName").value;
    console.log(AccountReceiver);
    var amount = document.getElementById("TransferAmount").value;
    var url = '/account/transferfood/' + AccountLoggedin + '/' + AccountReceiver + '/'+ amount;
    var status = document.getElementById('status');
    superagent
    .get(url)
    .end(function(err,res){
        if(err){
            console.log(err)
        }
        else{
            console.log(res.body);
            status.innerHTML = JSON.stringify(res.text);
            // Connect to the local Ethereum node and use its first account.
            web3 = new Web3(new Web3.providers.HttpProvider("http://localhost:8545"));
            web3.eth.defaultAccount = web3.eth.accounts[0];
            var address1 = web3.eth.accounts[0];
            console.log(web3.eth.accounts[0]);
            // ABI of the food-tracking contract (same as in CheckDelivery).
            var FoodContract = web3.eth.contract([
                {
                    "constant": false,
                    "inputs": [
                        {
                            "name": "_foodName",
                            "type": "string"
                        },
                        {
                            "name": "_originalFarm",
                            "type": "string"
                        }
                    ],
                    "name": "FoodCreation",
                    "outputs": [],
                    "payable": false,
                    "stateMutability": "nonpayable",
                    "type": "function"
                },
                {
                    "constant": true,
                    "inputs": [],
                    "name": "getFood",
                    "outputs": [
                        {
                            "name": "",
                            "type": "string"
                        },
                        {
                            "name": "",
                            "type": "string"
                        }
                    ],
                    "payable": false,
                    "stateMutability": "view",
                    "type": "function"
                }
            ]);
            var FoodTrans = FoodContract.at('0xfc6c5083d6d34b8877d90fd79f1ac96358c89ede');
            console.log(FoodTrans);
            // Record (product, sender) on-chain; the sender is the origin.
            FoodTrans.FoodCreation(PName,AccountLoggedin);
        }
    });
}
// Look up a food item by restaurant name and food id and show its trace.
function CheckFood(){
    var statusArea = document.getElementById('status');
    var endpoint = '/account/'
        + document.getElementById("ResName").value
        + '/'
        + document.getElementById("FoodID").value;
    superagent.get(endpoint).end(function(error, response){
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.body);
        statusArea.innerHTML = JSON.stringify(response.text);
    });
}
<file_sep>var ui = {};
// Breadcrumb navigation shown to anonymous (logged-out) visitors.
ui.navigation = `
<nav aria-label="breadcrumb">
<ol class="breadcrumb">
<li class="breadcrumb-item active" aria-current="page" style="font-size:18px;font-weight:bold">FoodBlock </a></li>
<li class="breadcrumb-item"><a href="#" onclick="loadLogin()">Account Login</a></li>
<li class="breadcrumb-item"><a href="#" onclick="loadPartners()">Current Partners</a></li>
<li class="breadcrumb-item"><a href="#" onclick="loadCreateAccount()">Become a Partner</a></li>
<li class="breadcrumb-item"><a href="#" onclick="loadCheck()">Check Your Food</a></li>
</ol>
</nav>
`;
// "Become a Partner" signup form; submit handler is create() in bank.js.
ui.createAccount = `
<div class="card text-white bg-primary mb-3" style="max-width: 18rem;">
<div class="card-header">Become a Partner</div>
<div class="card-body">
<p class="card-text">Account Type</p>
<p>
<select id = "CreateAccountType" class="form-control" aria-describedby="basic-addon3">
<option value>Choose...</option>
<option value="Farm">Farm</option>
<option value="Processor">Processor</option>
<option value="Restaurant">Restaurant</option>
</select>
</p>
<p class="card-text"> </p>
<p class="card-text">Name</p>
<input type="text" class="form-control" placeholder="Enter Name" id="createName" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Email address</p>
<input type="text" class="form-control" placeholder="Enter Email" id="createEmail" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Password</p>
<input type="<PASSWORD>" class="form-control" placeholder="Enter Password" id="createPassword" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "create()">Create Account</a>
<div id="status"></div>
</div>
</div>
`;
// "Transfer Food" form for logged-in partners; handler is TransferFood().
ui.transferfood = `
<div class="card text-white bg-danger mb-3" style="max-width: 18rem;">
<div class="card-header">Transfer Food</div>
<div class="card-body">
<p class="card-text">Receiver</p>
<input type="text" class="form-control" placeholder="Enter Name" id="ReceiverName" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Product Name</p>
<input type="text" class="form-control" placeholder="Enter Name" id="ProductName" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Amount</p>
<input type="number" class="form-control" placeholder="Enter amount" value = 0 id="TransferAmount" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "TransferFood()">Transfer</a>
<div id="status"></div>
</div>
</div>`;
// Login form (account type, email, password); handler is login().
ui.login = `
<div class="card text-white bg-secondary mb-3" style="max-width: 18rem;">
<div class="card-header">Login</div>
<div class="card-body">
<p class="card-text">Account Type</p>
<p>
<select id = "AccountType" class="form-control" aria-describedby="basic-addon3">
<option value>Choose...</option>
<option value = "Farm">Farm</option>
<option value = "Processor">Processor</option>
<option value = "Restaurant">Restaurant</option>
</select>
</p>
<p> </p>
<p class="card-text"> </p>
<p class="card-text">Email</p>
<input type="text" class="form-control" placeholder="Enter Email" id="loginEmail" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Password</p>
<input type="<PASSWORD>" class="form-control" placeholder="Enter Password" id="loginPassword" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "login()">Login</a>
<div id="status"></div>
</div>
</div>
`;
// Consumer-facing "Check your Food" lookup form; handler is CheckFood().
ui.checkFood = `
<div class="card text-white bg-warning mb-3" style="max-width: 18rem;">
<div class="card-header">Check you Food</div>
<div class="card-body">
<p class="card-text"> </p>
<p class="card-text">Food ID</p>
<input type="text" class="form-control" placeholder="Enter Food ID" id="FoodID" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Restaurant Name</p>
<input type="text" class="form-control" placeholder="Enter Restaurant Name" id="ResName" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "CheckFood()">Check</a>
<div id="status"></div>
</div>
</div>`;
// Static table of current partners (hard-coded demo data).
ui.partners = `
<div class="card text-white bg-danger mb-3" style="max-width: 40rem;">
<div class="card-header">Current Partners</div>
<div class="card-body">
<table class="table">
<thead>
<tr>
<th scope="col">Farms</th>
<th scope="col">Processors</th>
<th scope="col">Restaurants</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row">LGO Farm</th>
<td>Lean Processor</td>
<td>Random Restaurant</td>
</tr>
<tr>
<th scope="row">LGO Farm 1</th>
<td>Lean Processor 1</td>
<td>Random Restaurant 1</td>
</tr>
</tbody>
</table>
<div id="status"></div>
</div>
</div>`;
// Transactions lookup form; handler is transactions().
ui.transactions = `
<div class="card text-white bg-info mb-3" style="max-width: 18rem;">
<div class="card-header">Transactions</div>
<div class="card-body">
<p class="card-text">Email</p>
<input type="text" class="form-control" placeholder="Enter Email" id="transactionsEmail" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "transactions()">Show Transactions</a>
<div id="status"></div>
</div>
</div>`;
// Balance lookup form; handler is balance() (loader currently commented out).
ui.balance = `
<div class="card text-white bg-success mb-3" style="max-width: 18rem;"><div class="card-header">Balance</div>
<div class="card-body">
<p class="card-text">Email</p>
<input type="text" class="form-control" placeholder="Enter Email" id="balanceEmail" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "balance()">Show Balance</a>
<div id="status"></div>
</div>
</div>`;
// Landing card shown when no view is selected.
ui.default = `
<div class="card bg-light mb-3" style="max-width: 18rem;">
<div class="card-header">FoodBlock</div>
<div class="card-body">
<h5 class="card-title">Welcome to FoodBlock</h5>
<p class="card-text">Secure your food!
<div>
<img src="FoodChain Logo.png" height="240">
</div>
</div>`;
// Debug card that dumps the whole data store; handler is allData().
ui.allData = `
<div class="card" style="width: 18rem;">
<div class="card-body">
<h5 class="card-title">All Data In Store</h5>
<a href="#" class="btn btn-primary" onclick = "allData()">Show All Data</a>
</div>
<div id="status"></div>
</div>`;
// Breadcrumb navigation shown after a successful login.
ui.navbar = `
<nav aria-label="breadcrumb">
<ol class="breadcrumb">
<li class="breadcrumb-item active" aria-current="page" style="font-size:18px;font-weight:bold">FoodBlock </a></li>
<li class="breadcrumb-item"><a href="#" onclick="loadTranferFood()">Sell Food</a></li>
<li class="breadcrumb-item"><a href="#" onclick="LoadProduce()">Produce Food</a></li>
<li class="breadcrumb-item"><a href="#" onclick="ViewHist()">View History</a></li>
<li class="breadcrumb-item"><a href="#" onclick="CheckInventory()">Check Inventory</a></li>
<li class="breadcrumb-item"><a href="#" onclick="Logout()">Logout</a></li>
</ol>
</nav>
`;
// Post-login welcome card with delivery check/accept actions
// (handlers: CheckDelivery(), AcceptFood()).
ui.welcome = `
<div class="card text-white bg-secondary mb-3" style="max-width: 36rem;">
<div class="card-header">Welcome</div>
<div class="card-body">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "CheckDelivery()">CheckDelivery</a>
<a href="#" class="btn btn-light" onclick = "AcceptFood()">AcceptFood</a>
<div id="FoodInfo"></div>
<div id="status" size="20"></div>
<img id="loader" src="https://loading.io/spinners/double-ring/lg.double-ring-spinner.gif">
</div>
</div>
`;
// "Produce Food" form (input consumed, output produced); handler is ProduceFood().
ui.produce = `
<div class="card text-white bg-danger mb-3" style="max-width: 18rem;">
<div class="card-header">Produce Food</div>
<div class="card-body">
<p class="card-text">Input</p>
<input type="number" class="form-control" placeholder="Enter amount" value = 0 id="InputAmount" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<p class="card-text">Output</p>
<input type="number" class="form-control" placeholder="Enter amount" value = 0 id="OutputAmount" aria-describedby="basic-addon3">
<p class="card-text"> </p>
<a href="#" class="btn btn-light" onclick = "ProduceFood()">Produce</a>
<div id="status"></div>
</div>
</div>`;
// Transaction-history card; handler is transhistory().
ui.history = `
<div class="card text-white bg-success mb-3" style="max-width: 18rem;"><div class="card-header">History</div>
<div class="card-body">
<a href="#" class="btn btn-light" onclick = "transhistory()">Show History</a>
<div id="status"></div>
</div>
</div>`;
// DOM anchors used by every view loader below.
var target = document.getElementById('target');
var navigation = document.getElementById('navigation');
navigation.innerHTML = ui.navigation;
var statusbar = document.getElementById('statusbar');
// Swap the main panel to the signup form.
var loadCreateAccount = function(){
    target.innerHTML = ui.createAccount;
};
// Swap the main panel to the login form.
var loadLogin = function(){
    target.innerHTML = ui.login;
    loginstatus();
};
// NOTE(review): looked up before loadLogin() injects the template, so this
// is likely null until a #status element exists in the page — verify.
var status = document.getElementById('status');
// Swap the main panel to the consumer food-lookup form.
var loadCheck = function(){
    target.innerHTML = ui.checkFood;
};
// Swap the main panel to the partner list.
var loadPartners = function(){
    target.innerHTML = ui.partners;
};
//
// Swap the main panel to the food-transfer form.
var loadTranferFood = function(){
    target.innerHTML = ui.transferfood;
};
//
// var loadBalance = function(){
//
// target.innerHTML = ui.balance;
// };
//
// Switch to the logged-in navigation bar and welcome view.
var successlogin = function(){
    navigation.innerHTML = ui.navbar;
    target.innerHTML = ui.welcome;
};
var loginstatus = function(){
    status.innerHTML = 'welcome';
}
// Restore the anonymous navigation bar and landing card.
var defaultModule = function(){
    navigation.innerHTML = ui.navigation;
    target.innerHTML = ui.default;
};
// Reset the UI to the logged-out default view and clear session state.
// Bug fix: the original assigned `Accountloggedin` (wrong case), which
// created a new global and left the real AccountLoggedin variable set
// after logout.
var Logout = function(){
    defaultModule();
    statusbar.innerHTML = null;
    AccountLoggedin = null;
    TypeLoggedin = null;
};
// Swap the main panel to the food-production form.
var LoadProduce = function(){
    target.innerHTML=ui.produce;
};
// Swap the main panel to the transaction-history card.
var ViewHist = function(){
    target.innerHTML=ui.history;
};
// Show the landing view on first page load.
defaultModule();
<file_sep>// setup server GW
// YOUR CODE
// setup directory used to serve static files
// YOUR CODE
// Express app with CORS enabled and a lowdb JSON file ("db.json") as the data store.
var express = require('express');
var low = require('lowdb');
// NOTE(review): the name `fs` shadows Node's built-in fs module — this is lowdb's
// FileSync adapter, not the filesystem API.
var fs = require('lowdb/adapters/FileSync');
var adapter = new fs('db.json');
var db = low(adapter);
var app = express();
var cors = require('cors');
// The client is served from a different origin during development.
app.use(cors());
// Serve the static front-end from ./public.
app.use(express.static('public'));
// setup data store
// YOUR CODE
// Seed the store with an empty accounts array on first run.
db.defaults({accounts:[]}).write();
// required data store structure
// YOUR CODE
/*
{
accounts:[
{name : '',
email : '',
balance : 0,
password : '',
transactions: []}
]
}
*/
// Create a new account of the given type (Farm / Processor / Restaurant).
// Route params: name, email, password, atype. Responds with a confirmation string.
app.get('/account/create/:name/:email/:password/:atype', function (req, res) {
    var email = req.params.email;
    var name = req.params.name;
    // FIX: the password route parameter was lost (placeholder text in place of
    // `req.params.password`), which made this handler a syntax error.
    var password = req.params.password;
    var atype = req.params.atype;
    // Fresh accounts start with empty inventories and empty history lists.
    var account = {
        accountT: atype,
        name: name,
        email: email,
        InputInventory: 0,
        OutputInventory: 0,
        password: password,
        pendingtransactions: [],
        producehistory: [],
        receivehistory: [],
        transactions: [],
    };
    db.get('accounts')
        .push(account)
        .write();
    console.log('The account for ' + name + ' was created successfully');
    res.send('The account for ' + name + ' was created successfully, yay');
    // TODO: return a failure string when the email is already taken.
});
//app.get('/account/login/:accountT/:email/:password', function (req, res) {
//
// // YOUR CODE
// // Login user - confirm credentials
// // If success, return account object
// var accountT = req.params.accountT
// var email = req.params.email;
// var password = <PASSWORD>;
// // NEED TO UPDATE FOR PASSWORD VERIFICATION************************************************
// var account = db.get('accounts').find({email:req.params.email}).value();
// res.send('welcome '+account.name+ ' your balance is = '+current_account.balance);
// // If fail, return null
//
//});
app.get('/account/get/:email', function (req, res) {
// YOUR CODE
// Return account based on email
var email = req.params.email;
var account = getAccount(email, {email: email});
console.log('Retrieved account ' + account.name);
res.send(account);
});
// Look up a single account by an arbitrary lowdb query object; returns the
// account or undefined. The first parameter is unused (callers pass the email
// for readability) and is kept so existing call sites keep working.
// FIX: declared with `var` — the previous assignment created an implicit
// global, which throws under strict mode.
var getAccount = function(email, query){
    return db.get('accounts')
        .find(query)
        .value();
};
// Look up a single account by a name-based lowdb query; returns the account
// or undefined. Structurally identical to getAccount — kept as a separate
// function only for call-site clarity. The first parameter is unused.
// FIX: declared with `var` instead of leaking an implicit global.
var getAccountName = function(name, query){
    return db.get('accounts')
        .find(query)
        .value();
};
// Credit :amount to the balance of the account identified by :email and
// append a Deposit entry to its transaction history.
// NOTE(review): accounts created via /account/create have no `balance`
// field, so parseInt(account.balance) yields NaN here — confirm which
// account-creation path this route is meant to pair with.
app.get('/account/deposit/:email/:amount', function (req, res) {
// YOUR CODE
// Deposit amount for email
// return success or failure string
var email = req.params.email;
var depositamount = req.params.amount;
var account = getAccount(email, {email: email});
var currentbalance = parseInt(account.balance);
var newbalance = currentbalance + parseInt(depositamount);
var transactions = account.transactions;
// Record the deposit before persisting the new balance.
transactions.push({Time: new Date(), Action: 'Deposit', Amount: depositamount, Description: "Deposited $" + depositamount})
account = db.get('accounts')
.find({email: email})
.assign({balance: newbalance, transactions: transactions})
.write();
console.log('Balance for ' + account.name + 'was credited with ' + depositamount + '. The new balance is ' + account.balance);
res.send('Balance for ' + account.name + 'was credited with ' + depositamount + '. The new balance is ' + account.balance);
});
// Debit :amount from the balance of the account identified by :email.
// Responds with a confirmation string, or a failure string on overdraft.
app.get('/account/withdraw/:email/:amount', function (req, res) {
    var email = req.params.email;
    var withdrawamount = req.params.amount;
    var account = getAccount(email, {email: email});
    var currentbalance = parseInt(account.balance);
    var newbalance = currentbalance - parseInt(withdrawamount);
    // Failure path the original TODO asked for: reject overdrafts.
    if (newbalance < 0) {
        res.send('Insufficient balance for ' + account.name + ': cannot withdraw ' + withdrawamount);
        return;
    }
    var transactions = account.transactions;
    // BUG FIX: the description previously referenced the undefined variable
    // `depositamount`, which threw a ReferenceError on every withdrawal.
    transactions.push({Time: new Date(), Action: 'Withdraw', Amount: withdrawamount, Description: "Withdrew $" + withdrawamount})
    account = db.get('accounts')
        .find({email: email})
        .assign({balance: newbalance, transactions: transactions})
        .write();
    console.log('Balance for ' + account.name + ' was reduced by ' + withdrawamount + '. The new balance is ' + account.balance);
    res.send('Balance for ' + account.name + ' was reduced by ' + withdrawamount + '. The new balance is ' + account.balance);
});
// Return the full transaction list for the account identified by :email.
app.get('/account/transactions/:email', function (req, res) {
    var userEmail = req.params.email;
    var found = getAccount(userEmail, {email: userEmail});
    res.send(found.transactions);
});
// Return every account in the store (debug/admin endpoint).
app.get('/account/all', function (req, res) {
    // BUG FIX: `deb` was a typo for the lowdb handle `db`, which made this
    // route throw a ReferenceError on every request.
    var accounts = db.get('accounts')
        .value();
    res.send(accounts);
});
// Log in: the account type, email and password must all match exactly.
// On success respond with the serialized account; otherwise an error string.
app.get('/account/login/:email/:accountT/:password', function(req, res) {
    var email = req.params.email;
    var accountT = req.params.accountT;
    var password = req.params.password;
    // FIX: the query previously contained placeholder text where the
    // `password` variable belongs, which made this handler a syntax error.
    var account = getAccount(email, {accountT: accountT, email: email, password: password});
    if (account == null) {
        console.log('Unable to login account ' + email + '. Ensure your account type, email and password are correct');
        res.send('Unable to login account ' + email + '. Ensure your account type, email and password are correct');
    }
    else {
        console.log('Successfully logged into account ' + account.name);
        res.send(JSON.stringify(account));
    }
});
// List the pending (not yet accepted) deliveries for the named account.
app.get('/account/checkdelivery/:name', function(req, res) {
    var who = req.params.name;
    var found = getAccountName(who, {name: who});
    if (!found) {
        var msg = 'Something is wrong ' + who + ' is not found.';
        console.log(msg);
        res.send(msg);
    } else {
        console.log('Check food sucess.' + found.name);
        res.send(JSON.stringify(found.pendingtransactions));
    }
});
// Accept a pending delivery for :name: the first entry of pendingtransactions
// is archived into receivehistory, then the pending list is cleared.
app.get('/account/acceptfood/:name', function(req,res) {
var accountname = req.params.name;
var account = getAccountName(accountname, {name:accountname});
//loginaccount = account;
if(account == null) {
console.log('Something is wrong ' + accountname + ' is not found.');
res.send('Something is wrong ' + accountname + ' is not found.');
//loginaccount = account;
}
else {
var waitingtransactions = account.pendingtransactions;
var receivetransactions = account.receivehistory;
// NOTE(review): only the first pending entry is archived, yet the entire
// pending list is discarded below — confirm that at most one delivery can
// be pending at a time.
receivetransactions.push(waitingtransactions[0]);
waitingtransactions=[];
account = db.get('accounts')
.find({name: accountname})
.assign({pendingtransactions: waitingtransactions, receivehistory: receivetransactions})
.write();
console.log('Food received.' + account.name);
// successlogin();
res.send('Food received.');
}}
);
// Return the receive history (accepted deliveries) for the named account.
app.get('/account/transhistory/:name', function(req, res) {
    var who = req.params.name;
    var found = getAccountName(who, {name: who});
    if (!found) {
        var msg = 'Something is wrong ' + who + ' is not found.';
        console.log(msg);
        res.send(msg);
    } else {
        console.log('Food received.' + found.receivehistory);
        res.send(found.receivehistory);
    }
});
// Convert :input units of input inventory into :output units of produced
// goods for :name. Farms record no input provenance; other account types
// attach the matching receive-history entry as InputDetail.
app.get('/account/producefood/:name/:input/:output', function(req, res) {
    var accountname = req.params.name;
    var account = getAccountName(accountname, {name: accountname});
    // BUG FIX: the account must be null-checked before its fields are read;
    // previously an unknown name crashed the route instead of returning the
    // intended error string.
    if (account == null) {
        console.log('Something is wrong ' + accountname + ' is not found.');
        res.send('Something is wrong ' + accountname + ' is not found.');
        return;
    }
    var inputamount = parseInt(req.params.input);
    var outputamount = parseInt(req.params.output);
    var newinput = account.InputInventory - inputamount;
    var newoutput = account.OutputInventory + outputamount;
    var producehistory1 = account.producehistory;
    // The next unconsumed receive-history entry is the provenance for this batch.
    var producefrom = account.receivehistory[producehistory1.length];
    console.log('Check food sucess.' + account.name);
    console.log(producefrom);
    // Farms are the start of the chain, so they have no input detail to record.
    var detail = (account.accountT == "Farm") ? [] : producefrom;
    producehistory1.push({Time: new Date(), Action: 'Produce', Amount: inputamount, Owner: accountname, InputDetail: detail});
    db.get('accounts')
        .find({name: accountname})
        .assign({InputInventory: newinput, OutputInventory: newoutput, producehistory: producehistory1})
        .write();
    res.send("Successfully produced.");
});
app.get('/account/transferfood/:name/:receiver/:amount',function(req,res) {
var accountname = req.params.name;
var receivername = req.params.receiver;
var transferamount = parseInt(req.params.amount);
console.log(accountname);
console.log(receivername);
var account = getAccountName(accountname, {name:accountname});
//var account = getAccount("<EMAIL>", {email:"<EMAIL>"});
console.log(account);
var receiveraccount = getAccountName(receivername, {name:receivername});
console.log(receiveraccount);
if(account == null){
console.log('Something is wrong, cannot find account ' + accountname + '.');
res.send('Something is wrong, cannot find account ' + accountname + '.');
}
else{
if(receiveraccount == null) {
if(account.accountT == "Restaurant") {
var currentoutput = account.OutputInventory;
var newoutput = currentoutput - transferamount;
if(newoutput <0) {
console.log('Not enough inventory, please make more.');
res.send('Not enough inventory, please make more.');
} else {
// transfer food to next stuff
var sendertransactions = account.transactions;
var detail = account.producehistory[account.producehistory.length-1];
console.log(account.transactions.length-1);
sendertransactions.push({Time: new Date(), Action: 'Sell', Amount: transferamount, Seller: accountname, Receiver: receivername, Detail: detail});
console.log(detail);
account = db.get('accounts')
.find({name: accountname})
.assign({OutputInventory: newoutput, transactions: sendertransactions})
.write();
console.log('Successfully sell ' + receivername + ' ' + transferamount+' amount of burgers.');
res.send('Successfully sell ' + receivername + ' ' + transferamount+' amount of burgers. ID is '+ account.transactions.length);
}
}
else {
console.log('Cannot find account ' + receivername + '.');
res.send('Cannot find account ' + receivername + '.');
}
}
else{
if(account.accountT == "Farm"){
if(receiveraccount.accountT != "Processor") {
console.log('You are sending it to a wrong placee, ' + receivername + 'is not a Processor.');
res.send('You are sending it to a wrong placee, ' + receivername + 'is not a Processor.');
} else {
var currentoutput = account.OutputInventory;
var newoutput = currentoutput - transferamount;
if(newoutput <0) {
console.log('Not enough inventory, please load more.');
res.send('Not enough inventory, please load more.');
} else {
var receivetransaction = receiveraccount.pendingtransactions;
var receivercurrentinput = receiveraccount.InputInventory;
var receivernewinput = receivercurrentinput+transferamount;
// transfer food to next stuff
var sendertransactions = account.transactions;
var detail = account.producehistory[account.producehistory.length-1];
console.log(account.transactions.length-1);
console.log(detail);
sendertransactions.push({Time: new Date(), Action: 'Sell', Amount: transferamount, Seller: accountname, Receiver: receivername, Detail: detail});
account = db.get('accounts')
.find({name: accountname})
.assign({OutputInventory: newoutput, transactions: sendertransactions})
.write();
receivetransaction.push({Time: new Date(), Action: 'Receive food', Amount: transferamount, TransferFrom: accountname, Receiver: receivername, Detail: detail});
account = db.get('accounts')
.find({name: receivername})
.assign({InputInventory: receivernewinput, pendingtransactions: receivetransaction})
.write();
console.log('Successfully sent ' + receivername + ' ' + transferamount+' amount of grain.');
res.send('Successfully sent ' + receivername + ' ' + transferamount+' amount of grain.');
}
}
} else {
if (account.accountT == "Processor"){
if(receiveraccount.accountT != "Restaurant") {
console.log('You are sending it to a wrong placee, ' + receivername + 'is not a Restaurant.');
res.send('You are sending it to a wrong placee, ' + receivername + 'is not a Restaurant.');
} else {
var currentoutput = account.OutputInventory;
var newoutput = currentoutput - transferamount;
if(newoutput <0) {
console.log('Not enough inventory, please make more.');
res.send('Not enough inventory, please make more.');
} else {
var receivetransaction = receiveraccount.pendingtransactions;
var receivercurrentinput = receiveraccount.InputInventory;
var receivernewinput = receivercurrentinput+transferamount;
// transfer food to next stuff
var sendertransactions = account.transactions;
var detail = account.producehistory[account.producehistory.length-1];
console.log(account.transactions.length-1);
console.log(detail);
sendertransactions.push({Time: new Date(), Action: 'Sell', Amount: transferamount, Seller: accountname, Receiver: receivername, Detail: detail});
console.log(detail);
account = db.get('accounts')
.find({name: accountname})
.assign({OutputInventory: newoutput, transactions: sendertransactions})
.write();
receivetransaction.push({Time: new Date(), Action: 'Receive food', Amount: transferamount, TransferFrom: accountname, Receiver: receivername, Detail: detail});
account = db.get('accounts')
.find({name: receivername})
.assign({InputInventory: receivernewinput, pendingtransactions: receivetransaction})
.write();
console.log('Successfully sent ' + receivername + ' ' + transferamount+' amount of bread.');
res.send('Successfully sent ' + receivername + ' ' + transferamount+' amount of bread.');
}
}
} else {
}
}
}
}
}
)
//need to start up server
// DEAD CODE (review): this path duplicates the /account/producefood route
// defined earlier, and Express dispatches to the first matching route, so
// this handler never runs. It is also incomplete: `req.params.ouputamount`
// is a typo, `inventory` is declared but never used, and `newoutput` /
// `sendertransactions` are undefined in this scope. Candidate for removal.
app.get('/account/producefood/:name/:inputamount/:outputamount',function(req,res) {
var inputamount = parseInt(req.params.inputamount);
var ouputamount = parseInt(req.params.ouputamount);
var accountname = req.params.name;
console.log(accountname);
var account = getAccountName(accountname, {name:accountname});
if (account.accountT == "Farm"){
var inventory
account = db.get('accounts')
.find({name: accountname})
.assign({OutputInventory: newoutput, transactions: sendertransactions})
.write();
}
})
// Trace a sold item: return transaction number :foodid (1-based) for the
// named account, serialized as JSON.
app.get('/account/:name/:foodid', function(req, res) {
    var itemIndex = parseInt(req.params.foodid);
    var who = req.params.name;
    console.log(who);
    var found = getAccountName(who, {name: who});
    if (found == null) {
        console.log('Unable find account ' + found + '.');
        res.send('Unable find account ' + found + '.');
    } else {
        console.log('Successfully checked' + found.name);
        res.send(JSON.stringify(found.transactions[itemIndex - 1]));
    }
});
// Start the HTTP server.
app.listen(8000, function() {
    console.log('Listening at port 8000');
});
| dca029cab08b8b289c852c9c974aa176cd78dd7d | [
"JavaScript"
] | 3 | JavaScript | lzhtongji/FinalFoodChain | 8b918e8bb0005c3373de48e8ed8fc4ab595e8008 | bbcb3bdca75a287f42b6d2be51fd5c36eeb08775 |
refs/heads/master | <repo_name>github-power/ArkNightsTool<file_sep>/src/me/xmgspace/ArkNightsTool/Main.java
package me.xmgspace.ArkNightsTool;
import java.awt.AWTException;
import java.awt.EventQueue;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.Robot;
import javax.swing.JFrame;
import java.awt.Toolkit;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JPanel;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.JScrollPane;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.border.TitledBorder;
import javax.swing.JLabel;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.JButton;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;
public class Main {
// 游戏版本
public static final String VERSION = "2021.10.05";
private JFrame frmArknightstool;
private JTextField startTextField_1_1;
private JTextField startTextField_1_2;
private JTextField startTextField_2_1;
private JTextField startTextField_2_2;
private JTextField overTextField_1;
private JTextField overTextField_2;
private JTextField sanityTextField;
private JTextField onceSanityTextField;
private JTextField timeTextField;
private JTextField mousePositionTextField_1;
private JTextField mousePositionTextField_2;
private JTextArea logTextArea;
// 模拟点击的进程
private Start thread;
// 日志内容
private String logData = "";
// 用户输入数据
private int[] inputData = new int[10];
/**
 * Launch the application: construct the UI, show it on the event dispatch
 * thread, restore the saved click coordinates/settings from the
 * "ArkNightsData" file, then loop forever mirroring the current mouse
 * position into the two read-only text fields.
 */
public static void main(String[] args) {
    Main frame = new Main();
    EventQueue.invokeLater(new Runnable() {
        public void run() {
            try {
                frame.frmArknightstool.setVisible(true);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
    try {
        // One saved value per line, in the same order as the input fields.
        BufferedReader in = new BufferedReader(new FileReader("ArkNightsData"));
        frame.startTextField_1_1.setText(in.readLine());
        frame.startTextField_1_2.setText(in.readLine());
        frame.startTextField_2_1.setText(in.readLine());
        frame.startTextField_2_2.setText(in.readLine());
        frame.overTextField_1.setText(in.readLine());
        frame.overTextField_2.setText(in.readLine());
        frame.onceSanityTextField.setText(in.readLine());
        frame.timeTextField.setText(in.readLine());
        in.close();
    } catch (Exception e1) {
        // A missing or short settings file is non-fatal: fields stay empty.
        e1.printStackTrace();
    }
    // Poll the mouse pointer position and display it (~40 Hz).
    // NOTE(review): this loop updates Swing components from the main thread,
    // not the EDT — works in practice here but is not thread-safe by contract.
    while (true) {
        Point point = MouseInfo.getPointerInfo().getLocation();
        frame.mousePositionTextField_1.setText(Double.toString(point.getX()));
        frame.mousePositionTextField_2.setText(Double.toString(point.getY()));
        try {
            Thread.sleep(25);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Create the application: switch to the system look-and-feel (falling back
 * silently to the default on failure) and build the UI.
 */
public Main() {
    try {
        javax.swing.UIManager.setLookAndFeel(javax.swing.UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
            | UnsupportedLookAndFeelException e) {
        // The system look-and-feel is cosmetic; log and continue with the default.
        e.printStackTrace();
    }
    initialize();
}
/**
* Initialize the contents of the frame.
*/
private void initialize() {
frmArknightstool = new JFrame();
frmArknightstool.setAlwaysOnTop(true);
frmArknightstool.setResizable(false);
frmArknightstool.setIconImage(
Toolkit.getDefaultToolkit().getImage(Main.class.getResource("/me/xmgspace/ArkNightsTool/icon.jpg")));
frmArknightstool.setTitle("ArkNightsTool");
frmArknightstool.setBounds(100, 100, 665, 577);
frmArknightstool.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
JPanel initPanel = new JPanel();
initPanel.setBorder(
new TitledBorder(null, "\u521D\u59CB\u5316PRTS", TitledBorder.LEADING, TitledBorder.TOP, null, null));
JPanel controlPanel = new JPanel();
controlPanel.setBorder(
new TitledBorder(null, "\u63A7\u5236\u4E2D\u5FC3", TitledBorder.LEADING, TitledBorder.TOP, null, null));
JScrollPane logScrollPane = new JScrollPane();
GroupLayout groupLayout = new GroupLayout(frmArknightstool.getContentPane());
groupLayout.setHorizontalGroup(groupLayout.createParallelGroup(Alignment.LEADING).addGroup(groupLayout
.createSequentialGroup().addContainerGap()
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addComponent(initPanel, GroupLayout.PREFERRED_SIZE, 436, GroupLayout.PREFERRED_SIZE)
.addPreferredGap(ComponentPlacement.RELATED)
.addComponent(controlPanel, GroupLayout.PREFERRED_SIZE, 186, GroupLayout.PREFERRED_SIZE)
.addContainerGap(15, Short.MAX_VALUE))
.addGroup(groupLayout.createSequentialGroup().addComponent(logScrollPane).addGap(17)))));
groupLayout.setVerticalGroup(groupLayout.createParallelGroup(Alignment.LEADING).addGroup(groupLayout
.createSequentialGroup().addContainerGap()
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(initPanel, GroupLayout.PREFERRED_SIZE, 349, GroupLayout.PREFERRED_SIZE)
.addComponent(controlPanel, GroupLayout.PREFERRED_SIZE, 349, GroupLayout.PREFERRED_SIZE))
.addPreferredGap(ComponentPlacement.UNRELATED)
.addComponent(logScrollPane, GroupLayout.PREFERRED_SIZE, 151, GroupLayout.PREFERRED_SIZE)
.addContainerGap(208, Short.MAX_VALUE)));
//
logTextArea = new JTextArea();
logTextArea.setEditable(false);
logScrollPane.setViewportView(logTextArea);
JButton startButton = new JButton("开始行动!");
startButton.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent arg0) {
//
startOperate();
}
});
startButton.setToolTipText("开始行动!");
JButton stopButton = new JButton("有内鬼终止行动");
stopButton.addMouseListener(new MouseAdapter() {
// @SuppressWarnings({ "deprecation", "unused" })
@Override
public void mouseClicked(MouseEvent arg0) {
if (thread != null) {
thread.kill();
} else {
logAppend("没有行动在执行");
}
}
});
stopButton.setToolTipText("有内鬼终止行动");
JButton exitButton = new JButton("退出程序");
exitButton.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
}
});
exitButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
System.exit(0);
}
});
exitButton.setToolTipText("退出程序");
GroupLayout gl_controlPanel = new GroupLayout(controlPanel);
gl_controlPanel.setHorizontalGroup(gl_controlPanel.createParallelGroup(Alignment.LEADING)
.addGroup(gl_controlPanel.createSequentialGroup().addGap(26)
.addGroup(gl_controlPanel.createParallelGroup(Alignment.LEADING)
.addComponent(exitButton, GroupLayout.DEFAULT_SIZE, 117, Short.MAX_VALUE)
.addComponent(stopButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE,
Short.MAX_VALUE)
.addComponent(startButton, GroupLayout.DEFAULT_SIZE, 117, Short.MAX_VALUE))
.addGap(31)));
gl_controlPanel.setVerticalGroup(gl_controlPanel.createParallelGroup(Alignment.LEADING)
.addGroup(gl_controlPanel.createSequentialGroup().addGap(50).addComponent(startButton).addGap(71)
.addComponent(stopButton).addGap(73).addComponent(exitButton)
.addContainerGap(62, Short.MAX_VALUE)));
controlPanel.setLayout(gl_controlPanel);
JLabel startLabel_1 = new JLabel("开始行动1按钮位置");
startLabel_1.setToolTipText("开始行动1按钮位置");
JLabel startLabel_2 = new JLabel("开始行动2按钮位置");
startLabel_2.setToolTipText("开始行动2按钮位置");
JLabel timeLabel = new JLabel("通关时间(秒)");
timeLabel.setToolTipText("通关时间");
JLabel sanityLabel = new JLabel("当前理智");
sanityLabel.setToolTipText("当前理智");
JLabel onceSanityLabel = new JLabel("通关消耗理智");
JLabel overLabel = new JLabel("结束行动点击位置");
overLabel.setToolTipText("结束行动点击位置");
JLabel mousePositionLabel = new JLabel("当前鼠标指针位置");
startTextField_1_1 = new JTextField();
startTextField_1_1.setColumns(10);
startTextField_1_2 = new JTextField();
startTextField_1_2.setColumns(10);
startTextField_2_1 = new JTextField();
startTextField_2_1.setColumns(10);
startTextField_2_2 = new JTextField();
startTextField_2_2.setColumns(10);
overTextField_1 = new JTextField();
overTextField_1.setColumns(10);
overTextField_2 = new JTextField();
overTextField_2.setColumns(10);
sanityTextField = new JTextField();
sanityTextField.setColumns(10);
onceSanityTextField = new JTextField();
onceSanityTextField.setColumns(10);
timeTextField = new JTextField();
timeTextField.setColumns(10);
mousePositionTextField_1 = new JTextField();
mousePositionTextField_1.setEditable(false);
mousePositionTextField_1.setColumns(10);
mousePositionTextField_2 = new JTextField();
mousePositionTextField_2.setEditable(false);
mousePositionTextField_2.setColumns(10);
GroupLayout gl_initPanel = new GroupLayout(initPanel);
gl_initPanel.setHorizontalGroup(gl_initPanel.createParallelGroup(Alignment.LEADING)
.addGroup(gl_initPanel.createSequentialGroup().addContainerGap()
.addGroup(gl_initPanel.createParallelGroup(Alignment.LEADING)
.addGroup(gl_initPanel.createSequentialGroup().addComponent(startLabel_1).addGap(18)
.addComponent(startTextField_1_1, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addGap(18).addComponent(startTextField_1_2, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(gl_initPanel.createSequentialGroup()
.addGroup(gl_initPanel.createParallelGroup(Alignment.LEADING)
.addComponent(startLabel_2).addComponent(overLabel)
.addComponent(sanityLabel).addComponent(onceSanityLabel)
.addComponent(timeLabel).addComponent(mousePositionLabel))
.addGap(18)
.addGroup(gl_initPanel.createParallelGroup(Alignment.LEADING)
.addGroup(gl_initPanel.createSequentialGroup()
.addComponent(mousePositionTextField_1,
GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)
.addGap(18).addComponent(mousePositionTextField_2,
GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addComponent(timeTextField, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(onceSanityTextField, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(
sanityTextField, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addGroup(gl_initPanel.createSequentialGroup()
.addComponent(overTextField_1, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addGap(18).addComponent(
overTextField_2, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(gl_initPanel.createSequentialGroup()
.addComponent(startTextField_2_1, GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addGap(18).addComponent(startTextField_2_2,
GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)))))
.addContainerGap(144, Short.MAX_VALUE)));
gl_initPanel.setVerticalGroup(gl_initPanel.createParallelGroup(Alignment.LEADING).addGroup(gl_initPanel
.createSequentialGroup().addGap(21)
.addGroup(gl_initPanel.createParallelGroup(Alignment.BASELINE).addComponent(startLabel_1)
.addComponent(startTextField_1_1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)
.addComponent(startTextField_1_2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel.createParallelGroup(Alignment.BASELINE).addComponent(startLabel_2)
.addComponent(startTextField_2_1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)
.addComponent(startTextField_2_2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel.createParallelGroup(Alignment.BASELINE).addComponent(overLabel)
.addComponent(overTextField_1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)
.addComponent(overTextField_2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel
.createParallelGroup(Alignment.BASELINE).addComponent(sanityLabel).addComponent(sanityTextField,
GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel.createParallelGroup(Alignment.BASELINE).addComponent(onceSanityLabel)
.addComponent(onceSanityTextField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel
.createParallelGroup(Alignment.BASELINE).addComponent(timeLabel).addComponent(timeTextField,
GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(gl_initPanel.createParallelGroup(Alignment.BASELINE).addComponent(mousePositionLabel)
.addComponent(mousePositionTextField_1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE)
.addComponent(mousePositionTextField_2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE))
.addContainerGap(49, Short.MAX_VALUE)));
initPanel.setLayout(gl_initPanel);
frmArknightstool.getContentPane().setLayout(groupLayout);
}
/**
 * Worker thread that simulates the mouse clicks for one farming session:
 * open stage, start run, wait for completion, leave results screen — repeated
 * once per available run of sanity.
 */
class Start extends Thread {
    /**
     * orderStatus - run flag: true while the thread may keep working, false to stop
     * random      - jitter source so clicks land on slightly different pixels
     * robot       - AWT robot used to move and press the mouse
     * count       - number of runs to perform
     * cycle_time  - seconds one game run takes
     * datas       - int array: three click coordinates (3*2), current sanity,
     *               sanity cost per run, run duration, run count
     */
    private boolean orderStatus = false;
    private Random random = new Random();
    private Robot robot = null;
    int sanity;
    int count;
    int cycle_time;
    int[] datas;

    /**
     * Store the user-supplied settings.
     *
     * @param args layout documented on {@code datas}
     */
    public Start(int[] args) {
        datas = args;
        this.count = args[9];
        this.cycle_time = args[8];
    }

    public void run() {
        // Allow the loop below to run until kill() is called.
        this.orderStatus = true;
        try {
            robot = new Robot();
        } catch (AWTException e) {
            e.printStackTrace();
            logTextArea.setText(logTextArea.getText() + "发生异常 程序终止\n");
            logTextArea.setCaretPosition(logTextArea.getText().length());
            // The robot is unavailable — abort before touching the mouse.
            this.orderStatus = false;
            return;
        }
        logAppend(String.format("===Version %s===", VERSION));
        logAppend("===\t任务开始\t ===");
        logAppend(String.format("当前 %d 理智,每次消耗 %d 理智,可以执行 %d 次", this.datas[6], this.datas[7], this.datas[9]));
        int i = 0;
        for (i = 1; this.orderStatus && i <= count; i++) {
            logAppend("===开始执行第" + i + "次任务===");
            // Click the first point (open the stage).
            this.moveAndPressMouse(this.datas[0], this.datas[1]);
            robot.delay(5000 + random.nextInt(500));
            // Click the second point (start the run).
            this.moveAndPressMouse(this.datas[2], this.datas[3]);
            // Wait for the game run to finish, updating a countdown in the log.
            {
                int _one_time = cycle_time;
                while (_one_time > 0) {
                    robot.delay(1000);
                    _one_time--;
                    String log_str = String.format("等待游戏运行..., 剩余时间 %d 秒", _one_time);
                    logLive(log_str);
                }
            }
            robot.delay(random.nextInt(10000));
            // Click the third point (leave the results screen).
            this.moveAndPressMouse(this.datas[4], this.datas[5]);
            logAppend("回到选关界面");
            // BUG FIX: remaining sanity is current - i * cost; the previous
            // (count - i) * cost under-reported it whenever the current sanity
            // was not an exact multiple of the per-run cost.
            sanity = this.datas[6] - i * this.datas[7];
            setSanity("" + sanity);
            logAppend(String.format("===第 %d 次任务执行完毕,剩余%d理智 ===", i, sanity));
            robot.delay(8000 + random.nextInt(2000));
        }
        if (!this.orderStatus) {
            logAppend("*****操作暂停*****");
        }
    }

    /** Request the worker to stop after the current run completes. */
    public void kill() {
        this.orderStatus = false;
    }

    /**
     * Move to (a, b) plus a small random offset and perform a left click.
     */
    public void moveAndPressMouse(int a, int b) {
        int _start1_1 = a + random.nextInt(50);
        int _start1_2 = b + random.nextInt(15);
        // Moved repeatedly: some environments ignore a single mouseMove.
        robot.mouseMove(_start1_1, _start1_2);
        robot.mouseMove(_start1_1, _start1_2);
        robot.mouseMove(_start1_1, _start1_2);
        robot.mousePress(InputEvent.BUTTON1_MASK);
        robot.delay(150 + random.nextInt(10));
        robot.mouseRelease(InputEvent.BUTTON1_MASK);
        logAppend(String.format("点击坐标: %d , %d ", _start1_1, _start1_2));
        System.out.println(_start1_1 + " " + _start1_2);
    }
}
/**
 * Validate the form input and, when valid, spawn a fresh click worker
 * after a short grace delay.
 */
public void startOperate() {
    if (!checkInput()) {
        return;
    }
    try {
        thread = new Start(inputData);
        Thread.sleep(1000);
        thread.start();
    } catch (InterruptedException ex) {
        logAppend("程序异常!!!!!");
    }
}
/**
 * Validate every input field and, on success, store the parsed values
 * into {@code inputData}.
 *
 * inputData layout: [0..5] the three click coordinates (x,y pairs),
 * [6] current sanity, [7] sanity cost per run, [8] run duration in
 * seconds, [9] computed number of runs.
 *
 * @return true when every field parsed as an integer
 */
public boolean checkInput() {
    boolean result = false;
    int count = 0;
    int start1_1 = 0;
    int start1_2 = 0;
    int start2_1 = 0;
    int start2_2 = 0;
    int over1 = 0;
    int over2 = 0;
    int cycle_time = 999;
    int spend_resource = 0;
    int current_resource = 0;
    // `step` tracks how far parsing got, selecting the matching warning below.
    int step = 0;
    String[] warningMessages = { "请输入正确的理智数值!", "请输入正确的位置!", "请输入正确的通关时间" };
    try {
        current_resource = Integer.parseInt(sanityTextField.getText());
        spend_resource = Integer.parseInt(onceSanityTextField.getText());
        count = current_resource / spend_resource;
        step++;
        start1_1 = Integer.parseInt(startTextField_1_1.getText());
        start1_2 = Integer.parseInt(startTextField_1_2.getText());
        start2_1 = Integer.parseInt(startTextField_2_1.getText());
        start2_2 = Integer.parseInt(startTextField_2_2.getText());
        over1 = Integer.parseInt(overTextField_1.getText());
        over2 = Integer.parseInt(overTextField_2.getText());
        step++;
        cycle_time = Integer.parseInt(timeTextField.getText());
        result = true;
    } catch (Exception e) {
        // Report which group of fields failed, based on how far `step` advanced.
        logAppend(warningMessages[step]);
    } finally {
        // Persist the parsed values only when everything succeeded.
        if (step == 2 && result) {
            inputData[0] = start1_1;
            inputData[1] = start1_2;
            inputData[2] = start2_1;
            inputData[3] = start2_2;
            inputData[4] = over1;
            inputData[5] = over2;
            inputData[6] = current_resource;
            inputData[7] = spend_resource;
            inputData[8] = cycle_time;
            inputData[9] = count;
            for (int i : inputData) {
                System.out.println(i);
            }
        }
    }
    return result;
}
/**
 * Appends logStr (plus a newline) to the persistent log buffer and
 * refreshes the text area, scrolling the caret to the end.
 *
 * @param logStr the message to append
 */
public void logAppend(String logStr) {
    logData = logData + logStr + "\n";
    logTextArea.setText(logData);
    logTextArea.setCaretPosition(logData.length());
}
/**
 * Displays logStr after the saved log WITHOUT storing it — used for
 * transient status such as countdown ticks; the next refresh discards it.
 *
 * @param logStr transient message to display
 */
public void logLive(String logStr) {
    logTextArea.setText(logData + logStr + "\n");
    logTextArea.setCaretPosition(logTextArea.getText().length());
}
/**
 * Clears both the stored log buffer and the on-screen text area.
 */
public void logClear() {
    logData = "";
    logTextArea.setText(logData);
    logTextArea.setCaretPosition(0);
}
/**
 * Updates the "current sanity" text field with the given value.
 *
 * @param value remaining sanity, already formatted as a string
 */
private void setSanity(String value) {
    this.sanityTextField.setText(value);
}
}
<file_sep>/README.md
# ArkNightsTool
## A tool for ArkNights
暑假开始的那几天用java写的一个程序,能够让你~~自动回家(住在1-7了)~~自动帮你刷精神污染的1-7或者其他图的工具。

写的很仓促 也很简单,GUI部分用windowbuilder利用swing实现。核心部分使用到了Robot()类。
怕封号,咱自己一直不敢用...用过几天倒也没啥事。胆大的尝试下吧!
## 修改
* 对控制中心进行了修改, 修改结果如下
<img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAosAAAI9CAYAAABSeTJ/AAAgAElEQVR4nO3dfbBc5X0n+F9fhA3hxVcGgzHY2MISyRCIw8uMGZdinMslcYmsC9Ykla3dmDBeG1Q7ASpl4mFnU7uZDLHJzEi4psB4vRqcqqy3AoFxDIXDRWMY7PJuJOQEYo+NkECJXwFbbd5f1fuH1KJ1dH59zunb956W7udTpdLtc57zPE+3wf3lebudk046qdfr9SIz7B5Mml27dsWyZcva7gYAS8B7/vUX4vpfOeH1Cz/6WlzzO38Uf1vj2bf+/ufjz37tHa9feO3R+Kt/sTb+4w8i4uJPx+2X/3Ic2b/37Dfj5ov/IP5yWIUNnnnP738+rh9s++V/iL/+tx+Nf/+NPa8/8Z9i46+/I6ampqLT6UTnxBNP3CcNCo4cyHq<KEY>">
* 实现了 `有内鬼终止行动` 按钮的方法
> 点击按钮后, 完成此次行动再终止行动
* 部分重复调用的语句封装进了方法
* 修改了日志输出,在游戏运行时添加了倒计时读秒
| 3435205a974097370d73b20ec56c42b8e1e37732 | [
"Markdown",
"Java"
] | 2 | Java | github-power/ArkNightsTool | 985e4a68fa264092c20d01e04296ddb06195440e | aaf0ddf1eadecc686e9a278794a1db31b4b3921b |
refs/heads/master | <repo_name>pranav-ap/inference-algos<file_sep>/resolution.py
from knowledge_base import KnowledgeBase
from itertools import combinations
from copy import deepcopy
from sentence_engine import (
to_cnf, extract_proposition_symbols, negate,
smart_tokenize)
def pl_resolve(c1, c2):
    """Return all resolvents of the clauses *c1* and *c2*.

    For every proposition symbol that occurs positively in one clause and
    negated in the other, the complementary pair is removed and the
    remaining literals are joined back into one disjunctive clause.
    An empty string in the result denotes the empty clause (contradiction).
    """
    combined = c1 + ' or ' + c2
    # Keep literals only: dropping the 'or' connectives up front means
    # removing a complementary pair cannot leave a dangling operator.
    literals = [t for t in smart_tokenize(combined) if t != 'or']
    symbols = extract_proposition_symbols(combined)
    resolvents = set()
    for s in symbols:
        if s in literals and negate(s) in literals:
            remaining = list(literals)
            remaining.remove(s)
            remaining.remove(negate(s))
            # Tuples are hashable; the original added raw lists to the
            # set, which raised TypeError whenever a complement existed.
            resolvents.add(tuple(remaining))
    return [' or '.join(r) for r in resolvents]
def pl_resolution(kb, alpha):
    """Decide ``kb |= alpha`` by resolution refutation.

    The KB conjoined with ``not alpha`` is converted to CNF; clauses are
    resolved pairwise until the empty clause appears (entailed) or no new
    clause can be derived (not entailed).
    """
    cnf_form = to_cnf("{} and not ( {} )".format(kb.as_sentence(), alpha))
    # Strip each conjunct: a naive split('and') leaves surrounding spaces.
    clauses = {clause.strip() for clause in cnf_form.split('and')}
    new = set()
    while True:
        for c1, c2 in combinations(clauses, 2):
            resolvents = pl_resolve(c1, c2)
            # The empty clause signals a contradiction: kb entails alpha.
            if '' in resolvents:
                return True
            # update(), not add(): pl_resolve returns a list of clauses
            # (the original add() put an unhashable list into the set).
            new.update(resolvents)
        if new.issubset(clauses):
            return False
        clauses.update(new)
def main():
    """Demo: wumpus-world breeze axioms; ask whether KB entails 'not p12'."""
    kb = KnowledgeBase()
    facts = (
        'not p11',
        'b11 <=> ( p12 or p21 )',
        'b21 <=> ( p11 or p22 or p31 )',
        'not b11',
        'b21',
    )
    for fact in facts:
        kb.tell(fact)
    alpha = 'not p12'
    print(pl_resolution(kb, alpha))
if __name__ == '__main__':
main()
<file_sep>/chaining.py
from knowledge_base import KnowledgeBase
from utils import operators
def get_premise_and_conclusion(sentence):
    """Split a definite clause 'premise => conclusion'.

    A bare fact (no '=>') yields an empty premise. Both parts are
    whitespace-stripped.
    """
    if '=>' not in sentence:
        return '', sentence.strip()
    premise, conclusion = sentence.split('=>')
    return premise.strip(), conclusion.strip()
def get_axioms(kb):
    """Return the KB sentences that are single atomic facts (axioms)."""
    axioms = set()
    for sentence in kb.sentences:
        if len(sentence.split()) == 1:
            axioms.add(sentence)
    return axioms
def pl_fc_entails(kb, query):
    """Forward chaining over definite clauses.

    Starts from the KB's atomic facts and fires every rule whose premise
    symbols have all been derived, adding the conclusion as a new fact.
    Returns True iff *query* is derived. Mutates the KB by telling it
    each derived conclusion (as the original did).
    """
    agenda = get_axioms(kb)
    # Per sentence: how many premise symbols are not yet known true.
    no_of_uninferred_premises = dict()
    for sentence in kb.sentences:
        premise, conclusion = get_premise_and_conclusion(sentence)
        no_of_uninferred_premises[sentence] = sum(
            1 for s in premise.split() if s not in operators)
    while agenda:
        axiom = agenda.pop()
        if axiom == query:
            return True
        index = 0
        while index < len(kb.sentences):
            sentence = kb.sentences[index]
            premise, conclusion = get_premise_and_conclusion(sentence)
            # Token-level membership: the original substring test
            # ('axiom in premise') wrongly matched e.g. 'p' inside 'p21'.
            if axiom in premise.split():
                no_of_uninferred_premises[sentence] -= 1
                if no_of_uninferred_premises[sentence] == 0:
                    agenda.add(conclusion)
                    kb.tell(conclusion)
            index += 1
    return False
def main():
    """Demo: forward chaining on the classic definite-clause example."""
    kb = KnowledgeBase()
    for sentence in ('a', 'b', 'a and b => l', 'a and p => l',
                     'b and l => m', 'l and m => p', 'p => q'):
        kb.tell(sentence)
    query = 'q'
    print('size of kb before inference : {}'.format(kb.size()))
    print('Entails ? {}'.format(pl_fc_entails(kb, query)))
    print('size of kb after inference : {}'.format(kb.size()))
    print('kb : {}'.format(kb.as_sentence()))
if __name__ == '__main__':
main()
<file_sep>/README.md
# Inference Strategies
[](https://www.codacy.com/app/pranavap96/inference-strategies?utm_source=github.com&utm_medium=referral&utm_content=pranav-ap/inference-strategies&utm_campaign=Badge_Grade)
<file_sep>/knowledge_base.py
class KnowledgeBase():
    """A simple propositional knowledge base: an ordered list of sentences."""

    def __init__(self):
        self.sentences = []

    def size(self):
        """Number of sentences currently stored."""
        return len(self.sentences)

    def tell(self, sentence):
        """Add *sentence* to the knowledge base."""
        self.sentences.append(sentence)

    def retract(self, sentence):
        """Remove the first occurrence of *sentence*."""
        self.sentences.remove(sentence)

    def as_sentence(self):
        """Render the whole KB as one conjunction: '( s1 ) and ( s2 ) ...'."""
        return '( ' + ' ) and ( '.join(self.sentences) + ' )'
def main():
    """Placeholder entry point; this module is used as a library."""
if __name__ == '__main__':
main()<file_sep>/sentence_engine.py
from anytree import RenderTree, findall, LevelOrderIter, PostOrderIter
from copy import deepcopy
from utils import (
Operator, Argument, logical_precedence, operators,
Not, And, Or, Implies, Bidirectional
)
def prepare(sentence):
    """Normalise whitespace: collapse runs of whitespace into single
    spaces and strip leading/trailing whitespace."""
    # str.split() with no argument discards empty fields, unlike
    # split(' '), which made the original round-trip a no-op despite
    # its "remove extra whitespaces" intent.
    sentence = ' '.join(sentence.split())
    return sentence
def dumb_tokenize(sentence):
    """Split *sentence* on single spaces after normalisation by prepare()."""
    normalised = prepare(sentence)
    return normalised.split(' ')
def smart_tokenize(sentence):
    """Tokenize *sentence*, fusing 'not' with the following proposition
    symbol so negative literals come out as single tokens,
    e.g. 'a or not b' -> ['a', 'or', 'not b'].

    A '$' sentinel is appended so the final held token is flushed; the
    sentinel itself is never emitted.
    """
    previous = ''  # token waiting to be emitted ('' = nothing held)
    tokens = []
    for current in dumb_tokenize(sentence) + ['$']:
        if previous == '':
            previous = current
            continue
        if previous != 'not':
            # Ordinary token: emit it and hold the new one.
            tokens.append(previous)
            previous = current
        elif current not in operators:
            # 'not' followed by a symbol: emit the fused negative literal.
            tokens.append(previous + ' ' + current)
            previous = ''
        else:
            # 'not' followed by an operator (e.g. 'not ('): emit 'not' alone.
            tokens.append(previous)
            previous = current
    return tokens
def negate(word):
    """Return the complementary literal of *word*: 'p' -> 'not p'; a
    literal that is already negated is returned unchanged."""
    # Match the 'not ' prefix INCLUDING the space: a bare
    # startswith('not') wrongly treated symbols such as 'notation'
    # as already-negated literals.
    return word if word.startswith('not ') else 'not {}'.format(word)
def extract_proposition_symbols(sentence):
    """Return the set of distinct proposition symbols in *sentence*
    (every token that is not a logical operator)."""
    symbols = set()
    for token in dumb_tokenize(sentence):
        if token not in operators:
            symbols.add(token)
    return symbols
def complement_exists(pos_symbol, sentence):
    """True when both *pos_symbol* and its negation occur as literals
    of *sentence*."""
    tokens = smart_tokenize(sentence)
    return pos_symbol in tokens and negate(pos_symbol) in tokens
def is_disjunction(sentence):
    """True when *sentence* is a plain disjunction: not a negated group
    and free of 'and', '=>' and '<=>'."""
    # Tokenize before testing: the original iterated over *characters*,
    # so no item could ever equal a multi-char operator and the check
    # was vacuously true for every sentence.
    return (not sentence.startswith('not (')
            and all(token not in ('and', '=>', '<=>') for token in sentence.split()))
def is_conjunction(sentence):
    """True when *sentence* is a plain conjunction: not a negated group
    and free of 'or', '=>' and '<=>'."""
    # Tokenize before testing: the original iterated over *characters*,
    # so the membership test was vacuously true for every sentence.
    return (not sentence.startswith('not (')
            and all(token not in ('or', '=>', '<=>') for token in sentence.split()))
def expression_tree_to_sentence(root):
    """Convert an expression tree back into an infix sentence string.

    The node symbols are collected in post-order and reversed, which is
    exactly the input convention postfix_to_infix expects.
    """
    symbols = [node.symbol for node in PostOrderIter(root)]
    symbols.reverse()
    # The stray debug print of the symbol list has been removed; callers
    # expect only the returned sentence.
    return postfix_to_infix(symbols)
def postfix_to_infix(postfix):
    """Rebuild a fully parenthesised infix string from *postfix*, a
    REVERSED postfix token list (as produced by
    expression_tree_to_sentence).

    The list is consumed by popping from the end, i.e. the tokens are
    processed in ordinary postfix order.
    """
    stack = []
    while postfix:
        p = postfix.pop()
        if p not in operators:
            stack.append(p)
        elif p == 'not':
            child = stack.pop()
            stack.append('( not {} )'.format(child))
        else:
            # The first pop is the RIGHT operand (it was pushed last).
            # The original assigned it to lhs, flipping operand order and
            # reversing non-commutative connectives such as '=>'.
            rhs, lhs = stack.pop(), stack.pop()
            stack.append('( {} {} {} )'.format(lhs, p, rhs))
    return stack[0]
def infix_to_postfix(sentence):
    """Shunting-yard conversion of an infix sentence to postfix
    (reverse Polish) form, returned as a space-separated string."""
    stack = []
    output = []
    for token in dumb_tokenize(sentence):
        if token not in operators:
            output.append(token)
            continue
        if token == '(':
            stack.append(token)
            continue
        if token == ')':
            # Unwind operators until the matching '(' is discarded.
            while stack and stack[-1] != '(':
                output.append(stack.pop())
            stack.pop()
            continue
        # Ordinary operator: flush higher-or-equal precedence first.
        while (stack and stack[-1] != '('
               and logical_precedence[token] <= logical_precedence[stack[-1]]):
            output.append(stack.pop())
        stack.append(token)
    # Flush whatever operators remain.
    while stack:
        output.append(stack.pop())
    return ' '.join(output)
def get_expression_tree(sentence):
    """Parse an infix *sentence* into an expression tree of Operator /
    Argument nodes (from utils) and return its root."""
    postfix = infix_to_postfix(sentence)
    postfix = dumb_tokenize(postfix)
    root = node = None
    stack = []
    # Standard postfix construction: operands are pushed; each operator
    # pops its children and pushes the combined subtree.
    for token in postfix:
        if token == 'not':
            child = stack.pop()
            node = Not(child=child)
        elif token in ['or', 'and', '=>', '<=>']:
            # First pop is the right operand (it was pushed last).
            rhs, lhs = stack.pop(), stack.pop()
            if token == 'or':
                node = Or(lhs=lhs, rhs=rhs)
            elif token == 'and':
                node = And(lhs=lhs, rhs=rhs)
            elif token == '=>':
                node = Implies(lhs=lhs, rhs=rhs)
            elif token == '<=>':
                node = Bidirectional(lhs=lhs, rhs=rhs)
        else:
            node = Argument(symbol=token)
        stack.append(node)
    # For a well-formed sentence exactly one node remains; nodes are
    # always truthy, so this reduces to root = stack[0].
    root = stack[0] if stack[0] else root
    return root
def get_death_row(root, type):
    """Collect every node under *root* that is an instance of *type*
    (the nodes a rewrite pass is about to replace)."""
    def is_target(node):
        return isinstance(node, type)
    return findall(root, filter_=is_target)
def eliminate_bidirectional(root):
    """Rewrite every 'a <=> b' node as '(a => b) and (b => a)'.

    Returns the (possibly new) root of the tree.
    """
    deathrow = get_death_row(root, Bidirectional)
    for inmate in deathrow:
        # Deep-copy the operands so the two implications own distinct
        # subtrees (anytree reparents on assignment, so sharing would
        # silently move nodes between them).
        child_1, child_2 = deepcopy(inmate.children)
        left_implication = Implies(lhs=child_1, rhs=child_2)
        child_1, child_2 = deepcopy(inmate.children)
        right_implication = Implies(lhs=child_2, rhs=child_1)
        and_node = And(lhs=left_implication, rhs=right_implication)
        if inmate.is_root:
            and_node.parent = None
            root = and_node
        else:
            # Splice the replacement into the parent's matching slot.
            if inmate == inmate.parent.lhs:
                inmate.parent.lhs = and_node
            else:
                inmate.parent.rhs = and_node
            and_node.parent = inmate.parent
            inmate.parent = None
    return root
def eliminate_implication(root):
    """Rewrite every 'a => b' node as '(not a) or b'.

    Returns the (possibly new) root of the tree.
    """
    deathrow = get_death_row(root, Implies)
    for inmate in deathrow:
        left_child, right_child = inmate.children
        not_node = Not(child=left_child)
        or_node = Or(lhs=not_node, rhs=right_child)
        # Attach to the old parent first; overwritten to None below when
        # the inmate was the root.
        or_node.parent = inmate.parent
        if inmate.is_root:
            or_node.parent = None
            root = or_node
        else:
            # Splice the replacement into the parent's matching slot.
            if inmate == inmate.parent.lhs:
                inmate.parent.lhs = or_node
            else:
                inmate.parent.rhs = or_node
            or_node.parent = inmate.parent
            inmate.parent = None
    return root
def move_not_inwards(root):
    """Push negations inwards by De Morgan's laws:
    'not (a and b)' -> '(not a) or (not b)' and
    'not (a or b)' -> '(not a) and (not b)'; then collapse double
    negations. Returns the (possibly new) root.
    """
    deathrow = get_death_row(root, Not)
    # Only 'not' nodes sitting directly on an And/Or are rewritten here.
    deathrow = [n for n in deathrow if isinstance(n.child, And) or isinstance(n.child, Or)]
    for inmate in deathrow:
        operator = inmate.child
        operator.parent = None
        left_child, right_child = operator.children
        not_node_1 = Not(child=left_child)
        not_node_2 = Not(child=right_child)
        # De Morgan: the dual connective with negated operands.
        new_op = And(lhs=not_node_1, rhs=not_node_2) if isinstance(operator, Or) else Or(lhs=not_node_1, rhs=not_node_2)
        if inmate.is_root:
            new_op.parent = None
            root = new_op
        else:
            if inmate == inmate.parent.lhs:
                inmate.parent.lhs = new_op
            else:
                inmate.parent.rhs = new_op
            new_op.parent = inmate.parent
            inmate.parent = None
    # Double-negation pass.
    # NOTE(review): this reparents the INNER Not (node.child) rather than
    # its grandchild, and does not update the grandparent's lhs/rhs
    # attributes — verify 'not not x' actually reduces to 'x' here.
    for node in LevelOrderIter(root):
        if isinstance(node, Not) and isinstance(node.child, Not):
            node.child.parent = node.parent
            node.parent = None
    return root
def move_disjunctions_inwards(root):
    """Distribute 'or' over 'and':
    '(a and b) or c' -> '(a or c) and (b or c)'.

    Returns the (possibly new) root of the tree.
    """
    deathrow = get_death_row(root, Or)
    # Only 'or' nodes with at least one 'and' child need distribution.
    deathrow = [n for n in deathrow if isinstance(n.lhs, And) or isinstance(n.rhs, And)]
    for inmate in deathrow:
        child1, child2 = inmate.children
        and_operator = child1 if isinstance(child1, And) else child2
        other_child = child2 if child2 != and_operator else child1
        # The non-And operand appears in both new disjunctions, so one
        # copy must be deep-copied (anytree reparents on assignment).
        other_child2 = deepcopy(other_child)
        and_child1, and_child2 = and_operator.children
        or1 = Or(lhs=and_child1, rhs=other_child)
        or2 = Or(lhs=and_child2, rhs=other_child2)
        and_node = And(parent=inmate.parent, lhs=or1, rhs=or2)
        if inmate.is_root:
            and_node.parent = None
            root = and_node
        else:
            if inmate == inmate.parent.lhs:
                inmate.parent.lhs = and_node
            else:
                inmate.parent.rhs = and_node
            inmate.parent = None
    return root
def to_cnf(sentence):
    """Convert *sentence* to conjunctive normal form via the classic
    four-step rewrite: eliminate '<=>', eliminate '=>', push 'not'
    inwards (De Morgan), then distribute 'or' over 'and'."""
    root = get_expression_tree(sentence)
    root = eliminate_bidirectional(root)
    root = eliminate_implication(root)
    root = move_not_inwards(root)
    root = move_disjunctions_inwards(root)
    # The stray debug dump (print(RenderTree(root))) has been removed;
    # callers expect only the CNF string.
    sentence = expression_tree_to_sentence(root)
    return sentence
def main():
    """Quick manual check of the CNF conversion."""
    print(to_cnf('( b11 <=> ( p12 or ( w and not p21 ) ) )'))
if __name__ == '__main__':
main()
<file_sep>/utils.py
from anytree import NodeMixin
# Tokens the tokenizers treat as logical operators / punctuation.
operators = ['not', 'and', 'or', '(', ')', '=>', '<=>']

# Binding strength for the shunting-yard conversion in sentence_engine:
# higher binds tighter ('not' strongest, '<=>' weakest).
logical_precedence = {
    'not': 7,
    'and': 6,
    'or': 5,
    '(': 4,
    ')': 3,
    '=>': 2,
    '<=>': 1
}
class Operator(NodeMixin):
    """Base class for logical-operator nodes in an expression tree.

    Subclasses set *symbol* and implement calculate(), which combines
    the children's boolean values into self.value.
    """
    def __init__(self, parent=None):
        self.parent = parent   # NodeMixin: assigning parent attaches the node
        self.value = None      # boolean result, filled in by calculate()
        self.symbol = ''       # the operator's token, e.g. 'and'

    def calculate(self):
        raise NotImplementedError()

    def __repr__(self):
        return 'Operator'
class UnaryOperator(Operator):
    """Operator with a single child subtree."""
    def __init__(self, child, parent=None):
        Operator.__init__(self, parent=parent)
        # Attaching via the anytree parent pointer makes *child* this
        # node's (only) tree child.
        child.parent = self
        self.child = child

    def calculate(self):
        raise NotImplementedError()

    def __repr__(self):
        return 'UnaryOperator'
class Not(UnaryOperator):
    """Logical negation node."""
    def __init__(self, child, parent=None):
        UnaryOperator.__init__(self, child, parent=parent)
        self.symbol = 'not'

    def calculate(self):
        # Child must have been evaluated first (bottom-up traversal).
        self.value = not self.child.value

    def __repr__(self):
        return 'not {}'.format(self.child.value)
class BinaryOperator(Operator):
    """Operator with left (lhs) and right (rhs) child subtrees."""
    def __init__(self, lhs, rhs, parent=None):
        Operator.__init__(self, parent=parent)
        # Attach both children via the anytree parent pointers; the order
        # of attachment fixes the children tuple as (lhs, rhs).
        lhs.parent = rhs.parent = self
        self.lhs = lhs
        self.rhs = rhs

    def calculate(self):
        raise NotImplementedError()

    def __repr__(self):
        return 'BinaryOperator'
class And(BinaryOperator):
    """Logical conjunction node."""
    def __init__(self, lhs, rhs, parent=None):
        BinaryOperator.__init__(self, lhs, rhs, parent=parent)
        self.symbol = 'and'

    def calculate(self):
        self.value = self.lhs.value and self.rhs.value

    def __repr__(self):
        return '{} and {}'.format(self.lhs.value, self.rhs.value)
class Or(BinaryOperator):
    """Logical disjunction node."""
    def __init__(self, lhs, rhs, parent=None):
        BinaryOperator.__init__(self, lhs, rhs, parent=parent)
        self.symbol = 'or'

    def calculate(self):
        self.value = self.lhs.value or self.rhs.value

    def __repr__(self):
        return '{} or {}'.format(self.lhs.value, self.rhs.value)
class Implies(BinaryOperator):
    """Material implication node (lhs => rhs)."""
    def __init__(self, lhs, rhs, parent=None):
        BinaryOperator.__init__(self, lhs, rhs, parent=parent)
        self.symbol = '=>'

    def calculate(self):
        # False only for True => False.
        self.value = False if self.lhs.value and not self.rhs.value else True

    def __repr__(self):
        return '{} => {}'.format(self.lhs.value, self.rhs.value)
class Bidirectional(BinaryOperator):
    """Biconditional node (lhs <=> rhs): true when both sides agree."""
    def __init__(self, lhs, rhs, parent=None):
        BinaryOperator.__init__(self, lhs, rhs, parent=parent)
        self.symbol = '<=>'

    def calculate(self):
        self.value = self.lhs.value == self.rhs.value

    def __repr__(self):
        return '{} <=> {}'.format(self.lhs.value, self.rhs.value)
class Argument(NodeMixin):
    """Leaf node holding a proposition symbol and its assigned truth value."""
    def __init__(self, parent=None, symbol=None, value=None):
        self.parent = parent
        self.symbol = symbol   # the proposition symbol, e.g. 'p11'
        self.value = value     # truth value assigned during evaluation

    def __repr__(self):
        return 'Argument {}'.format(self.symbol)
def main():
    """Placeholder; utils is imported for its node classes."""
if __name__ == '__main__':
main()
<file_sep>/truth_table_enumeration.py
from knowledge_base import KnowledgeBase
from anytree import LevelOrderIter
from sentence_engine import get_expression_tree, extract_proposition_symbols
from utils import Argument
from copy import deepcopy
def is_pl_true(sentence, model):
    """Evaluate *sentence* under *model* (symbol -> bool) and return the
    root's truth value."""
    root = get_expression_tree(sentence)
    nodes = list(LevelOrderIter(root))
    # Walk level-order in reverse, i.e. leaves before their parents, so
    # every operator's children are evaluated first.
    for node in reversed(nodes):
        if isinstance(node, Argument):
            node.value = model.get(node.symbol)
        else:
            node.calculate()
    return root.value
def check_all(kb, alpha, symbols, model):
    """Recursively enumerate truth assignments over *symbols*.

    Returns False only when some completed model satisfies the KB but
    falsifies *alpha*; models that falsify the KB are vacuously accepted.
    """
    if symbols:
        p = symbols.pop()
        # Branch on p = True first, then p = False, short-circuiting as
        # soon as a counter-model is found.
        model_true = deepcopy(model)
        model_true[p] = True
        if not check_all(kb, alpha, deepcopy(symbols), model_true):
            return False
        model_false = deepcopy(model)
        model_false[p] = False
        return check_all(kb, alpha, deepcopy(symbols), model_false)
    if not all(is_pl_true(s, model) for s in kb.sentences):
        # Model falsifies the KB, so entailment holds here vacuously.
        return True
    print('model {} satisfies kb'.format(model))
    res = is_pl_true(alpha, model)
    print('Does it satisfy alpha ? {}'.format(res))
    print()
    return res
def check_if_entails(kb, alpha):
    """Truth-table entailment: kb |= alpha iff alpha holds in every model
    of the kb (over all symbols appearing in kb or alpha)."""
    symbols = extract_proposition_symbols(alpha)
    for sentence in kb.sentences:
        symbols.update(extract_proposition_symbols(sentence))
    return check_all(kb, alpha, symbols, {})
def main():
    """Demo: wumpus-world breeze axioms; check kb |= 'not p12'."""
    kb = KnowledgeBase()
    for axiom in ('not p11',
                  'b11 <=> ( p12 or p21 )',
                  'b21 <=> ( p11 or p22 or p31 )',
                  'not b11',
                  'b21'):
        kb.tell(axiom)
    alpha = 'not p12'
    result = check_if_entails(kb, alpha)
    print('kb entails alpha ? {}'.format(result))
if __name__ == '__main__':
main()
| 9bc58ce3bebe01ac112166412687e8828b5b0c7d | [
"Markdown",
"Python"
] | 7 | Python | pranav-ap/inference-algos | 8fb35ebb5ad903470d855554cb962aa774a88446 | f8509ccd212934f4b6b8417a73c591f2f36c4897 |
refs/heads/master | <repo_name>sagarthecook/mongoapp<file_sep>/src/main/java/com/desire3d/progma/command/repository/SQLDefinationRepositoryImpl.java
package com.desire3d.progma.command.repository;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import com.desire3d.progma.fw.command.repository.SQLDefinationRepository;
import com.desire3d.progma.model.NewFieldDefination;
import com.desire3d.progma.model.SQLDefination;
/**
 * JDO-backed repository for SQLDefination / NewFieldDefination entities.
 *
 * NOTE(review): every method obtains a PersistenceManager from the
 * factory but never closes it — presumably relying on container
 * management; confirm, otherwise this leaks managers. Returned objects
 * are also still attached (no detachCopy), so callers must use them
 * within the manager's lifetime.
 */
@Repository
public class SQLDefinationRepositoryImpl implements SQLDefinationRepository {

    @Autowired
    private PersistenceManagerFactory pmf;

    /** Persists (inserts or updates) the given SQLDefination. */
    @Override
    public SQLDefination saveOrUpdate(SQLDefination sqlDefination) {
        PersistenceManager pm = pmf.getPersistenceManager();
        return pm.makePersistent(sqlDefination);
    }

    /** Persists (inserts or updates) the given NewFieldDefination. */
    @Override
    public NewFieldDefination saveNewFieldDefination(NewFieldDefination newFieldDefination) {
        PersistenceManager pm = pmf.getPersistenceManager();
        return pm.makePersistent(newFieldDefination);
    }

    /** Looks up a SQLDefination by its datastore identity. */
    @Override
    public SQLDefination findById(String id) {
        PersistenceManager pm = pmf.getPersistenceManager();
        return pm.getObjectById(SQLDefination.class, id);
    }

    /** Looks up a NewFieldDefination by its datastore identity. */
    @Override
    public NewFieldDefination findByFieldId(String id) {
        PersistenceManager pm = pmf.getPersistenceManager();
        return pm.getObjectById(NewFieldDefination.class, id);
    }
}
<file_sep>/src/main/java/com/desire3d/progma/model/Parameter.java
package com.desire3d.progma.model;
import javax.jdo.annotations.EmbeddedOnly;
import javax.jdo.annotations.FetchGroup;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;
@PersistenceCapable(detachable = "true")
@EmbeddedOnly
@FetchGroup(name = "Parameter",members = {
@Persistent(name = "position"),
@Persistent(name = "value"),
@Persistent(name = "dynamic")
})
public class Parameter {
private int position;
private String value;
private boolean dynamic;
public Parameter(){
}
public Parameter(int position, String value) {
this.position = position;
this.dynamic = value.toString().equalsIgnoreCase("?");
if(!this.dynamic)
this.value = value;
}
public String getValue(){
return this.value;
}
public boolean isDynamic(){
return this.dynamic;
}
@Override
public String toString() {
return "Postion ["+position+"] Value ["+value+"] Dynamic ["+dynamic+"]";
}
/**
* @return the position
*/
public int getPosition() {
return position;
}
/**
* @param position the position to set
*/
public void setPosition(int position) {
this.position = position;
}
/**
* @param value the value to set
*/
public void setValue(String value) {
this.value = value;
}
/**
* @param dynamic the dynamic to set
*/
public void setDynamic(boolean dynamic) {
this.dynamic = dynamic;
}
}<file_sep>/src/main/java/com/desire3d/progma/model/NewFieldDefination.java
package com.desire3d.progma.model;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;
import javax.jdo.annotations.PrimaryKey;
/**
 * JDO entity persisted to the "field_definition" table/collection; holds
 * a single user-defined field name keyed by a generated UUID.
 */
@PersistenceCapable(table = "field_definition", detachable = "true")
public class NewFieldDefination {

    // Mapped to the datastore's "_id" column; value generated by the
    // "uuid" strategy on persist.
    @PrimaryKey
    @Persistent(column = "_id", customValueStrategy = "uuid")
    public String id;

    @Persistent
    private String newfieldname;

    public String getNewfieldname() {
        return newfieldname;
    }

    public void setNewfieldname(String newfieldname) {
        this.newfieldname = newfieldname;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }
}
}<file_sep>/src/main/java/com/desire3d/progma/Config.java
package com.desire3d.progma;
import java.util.Properties;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManagerFactory;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
@Configuration
@EnableAutoConfiguration(exclude = { MongoAutoConfiguration.class, MongoDataAutoConfiguration.class })
public class Config {
@Bean
public PersistenceManagerFactory getPersistenceManagerFactory() {
Properties prop = new Properties();
prop.setProperty("javax.jdo.option.ConnectionURL", "mongodb:localhost:27017/progmadb");
prop.setProperty("javax.jdo.option.ConnectionUserName", "progmadbuser");
prop.setProperty("javax.jdo.option.ConnectionPassword", <PASSWORD>");
prop.setProperty("javax.jdo.option.Mapping", "mongodb");
prop.setProperty("datanucleus.schema.autoCreateAll", "true");
PersistenceManagerFactory pmf = JDOHelper.getPersistenceManagerFactory(prop);
return pmf;
}
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}
| 8f94eab008959d3c1c3cf10ae6bbf27f19d85fb4 | [
"Java"
] | 4 | Java | sagarthecook/mongoapp | 59d2b9fb0ca54f1d4d781b145339d68806855d2d | 7393fe54940fe7e2111db279ff0500288aa74715 |
refs/heads/master | <file_sep>import React from "react";
// Presentational card listing the salon's weekly opening hours.
// NOTE(review): the schedule is hard-coded in the markup; confirm whether
// it should come from props or a backend instead.
export const Hours = () => {
  return (
    <div className="card">
      <div className="hours">
        Horas de Apertura
        <br />
        LUN 09:00 AM - 06:00 PM <br />
        MAR 09:00 AM - 07:10 PM <br />
        MIÉ 09:00 AM - 07:20 PM <br />
        JUE 09:00 AM - 07:35 PM <br />
        VIE 09:00 AM - 12:30 PM <br />
        SÁB Cerrado DOM Cerrado
      </div>
    </div>
  );
};
<file_sep>import React from "react";
// Presentational card listing the services offered by Salixa Makeup.
export const Makeup = () => {
  return (
    <div className="card">
      <div className="makeup">
        Salixa Makeup
        <br />• Maquillajes • Peinados • Cejas • Pestañas • Depilación
      </div>
    </div>
  );
};
<file_sep>import React from "react";
// Presentational card with the business's contact details.
// NOTE(review): the Instagram URL text below is garbled (separators
// missing) and the street/email lines contain redaction placeholders —
// confirm the real values with the content owner before shipping.
export const Contact = () => {
  return (
    <div className="card">
      <div className="Contact">
        Contactar con nosotros
        <br />
        +18099727214
        <br />
        <EMAIL>
        <br />
        Calle 47-A, , <NAME>, DO
        <br />
        httpswwwinstagramcomsalixamakeup.simplybook.me
      </div>
    </div>
  );
};
<file_sep>import logo from "./logo.svg";
import "./App.css";
import Nav from "./NavBar/Nav";
import {Hours} from "./Information/Hours";
import {Contact} from "./Information/Contact";
import {Makeup} from "./Information/Makeup";
import {Information} from "./Information/Information";
// Root component: renders the navigation bar followed by the
// information section.
// NOTE(review): Hours, Contact, Makeup and `logo` are imported at the top
// of this file but never rendered here — confirm whether they belong in
// the layout or the imports should be pruned.
function App() {
  return (
    <div className="App">
      <Nav />
      <Information />
    </div>
  );
}
export default App;
<file_sep>import React, {Component} from "react";
// Simple navigation bar. The anchors are placeholders ("#") —
// presumably to be replaced once routing is wired up.
export default class Nav extends Component {
  render() {
    return (
      <div>
        <a href="#">Inicio</a>
        <a href="#">Opiniones</a>
        <a href="#">Galeria</a>
        <a href="#">Mis Reservas</a>
      </div>
    );
  }
}
| d67ee60b15541782094933b075e68ce7198ae244 | [
"JavaScript"
] | 5 | JavaScript | vincentservio/salixa-demo | 60919b067bf05746cd07bde6cda8ea337889d167 | f6ec036d742a9072f431d3169f157ef74de7a9f7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.